diff --git a/.vscode/settings.json b/.vscode/settings.json index 1c6d6a8f8c189..34ed9fbae2c42 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -44,6 +44,7 @@ "nhooyr", "nolint", "nosec", + "ntqry", "oneof", "parameterscopeid", "promptui", diff --git a/cli/clitest/clitest.go b/cli/clitest/clitest.go index 7166843e93dac..e9fbbd4f23d1d 100644 --- a/cli/clitest/clitest.go +++ b/cli/clitest/clitest.go @@ -1,16 +1,34 @@ package clitest import ( + "archive/tar" "bufio" + "bytes" + "errors" "io" + "os" + "path/filepath" + "regexp" "testing" + "github.com/Netflix/go-expect" "github.com/spf13/cobra" + "github.com/stretchr/testify/require" "github.com/coder/coder/cli" "github.com/coder/coder/cli/config" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/provisioner/echo" ) +var ( + // Used to ensure terminal output doesn't have anything crazy! + // See: https://stackoverflow.com/a/29497680 + stripAnsi = regexp.MustCompile("[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))") +) + +// New creates a CLI instance with a configuration pointed to a +// temporary testing directory. func New(t *testing.T, args ...string) (*cobra.Command, config.Root) { cmd := cli.Root() dir := t.TempDir() @@ -19,7 +37,27 @@ func New(t *testing.T, args ...string) (*cobra.Command, config.Root) { return cmd, root } -func StdoutLogs(t *testing.T) io.Writer { +// SetupConfig applies the URL and SessionToken of the client to the config. +func SetupConfig(t *testing.T, client *codersdk.Client, root config.Root) { + err := root.Session().Write(client.SessionToken) + require.NoError(t, err) + err = root.URL().Write(client.URL.String()) + require.NoError(t, err) +} + +// CreateProjectVersionSource writes the echo provisioner responses into a +// new temporary testing directory. +func CreateProjectVersionSource(t *testing.T, responses *echo.Responses) string { + directory := t.TempDir() + data, err := echo.Tar(responses) + require.NoError(t, err) + extractTar(t, data, directory) + return directory +} + +// NewConsole creates a new TTY bound to the command provided. +// All ANSI escape codes are stripped to provide clean output. +func NewConsole(t *testing.T, cmd *cobra.Command) *expect.Console { reader, writer := io.Pipe() scanner := bufio.NewScanner(reader) t.Cleanup(func() { @@ -31,8 +69,46 @@ func StdoutLogs(t *testing.T) io.Writer { if scanner.Err() != nil { return } - t.Log(scanner.Text()) + t.Log(stripAnsi.ReplaceAllString(scanner.Text(), "")) } }() - return writer + + console, err := expect.NewConsole(expect.WithStdout(writer)) + require.NoError(t, err) + cmd.SetIn(console.Tty()) + cmd.SetOut(console.Tty()) + return console +} + +func extractTar(t *testing.T, data []byte, directory string) { + reader := tar.NewReader(bytes.NewBuffer(data)) + for { + header, err := reader.Next() + if errors.Is(err, io.EOF) { + break + } + require.NoError(t, err) + // #nosec + path := filepath.Join(directory, header.Name) + mode := header.FileInfo().Mode() + if mode == 0 { + mode = 0600 + } + switch header.Typeflag { + case tar.TypeDir: + err = os.MkdirAll(path, mode) + require.NoError(t, err) + case tar.TypeReg: + file, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, mode) + require.NoError(t, err) + // Max file size of 10MB. 
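+ // Using io.CopyN with a hard cap (rather than an unbounded io.Copy) bounds how
+ // much a single archive entry can write, presumably so a malformed test tarball
+ // can't balloon on disk during extraction.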
+ _, err = io.CopyN(file, reader, (1<<20)*10) + if errors.Is(err, io.EOF) { + err = nil + } + require.NoError(t, err) + err = file.Close() + require.NoError(t, err) + } + } } diff --git a/cli/clitest/clitest_test.go b/cli/clitest/clitest_test.go new file mode 100644 index 0000000000000..806e04ecc2a4e --- /dev/null +++ b/cli/clitest/clitest_test.go @@ -0,0 +1,31 @@ +//go:build !windows + +package clitest_test + +import ( + "testing" + + "github.com/coder/coder/cli/clitest" + "github.com/coder/coder/coderd/coderdtest" + "github.com/stretchr/testify/require" + "go.uber.org/goleak" +) + +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m) +} + +func TestCli(t *testing.T) { + t.Parallel() + clitest.CreateProjectVersionSource(t, nil) + client := coderdtest.New(t) + cmd, config := clitest.New(t) + clitest.SetupConfig(t, client, config) + console := clitest.NewConsole(t, cmd) + go func() { + err := cmd.Execute() + require.NoError(t, err) + }() + _, err := console.ExpectString("coder") + require.NoError(t, err) +} diff --git a/cli/login.go b/cli/login.go index 73758719d0128..a1df7e905d7dd 100644 --- a/cli/login.go +++ b/cli/login.go @@ -49,7 +49,7 @@ func login() *cobra.Command { } _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s Your Coder deployment hasn't been set up!\n", color.HiBlackString(">")) - _, err := runPrompt(cmd, &promptui.Prompt{ + _, err := prompt(cmd, &promptui.Prompt{ Label: "Would you like to create the first user?", IsConfirm: true, Default: "y", @@ -61,7 +61,7 @@ func login() *cobra.Command { if err != nil { return xerrors.Errorf("get current user: %w", err) } - username, err := runPrompt(cmd, &promptui.Prompt{ + username, err := prompt(cmd, &promptui.Prompt{ Label: "What username would you like?", Default: currentUser.Username, }) @@ -69,7 +69,7 @@ func login() *cobra.Command { return xerrors.Errorf("pick username prompt: %w", err) } - organization, err := runPrompt(cmd, &promptui.Prompt{ + organization, err := prompt(cmd, &promptui.Prompt{ Label: "What is the name of your organization?", Default: "acme-corp", }) @@ -77,7 +77,7 @@ func login() *cobra.Command { return xerrors.Errorf("pick organization prompt: %w", err) } - email, err := runPrompt(cmd, &promptui.Prompt{ + email, err := prompt(cmd, &promptui.Prompt{ Label: "What's your email?", Validate: func(s string) error { err := validator.New().Var(s, "email") @@ -91,7 +91,7 @@ func login() *cobra.Command { return xerrors.Errorf("specify email prompt: %w", err) } - password, err := runPrompt(cmd, &promptui.Prompt{ + password, err := prompt(cmd, &promptui.Prompt{ Label: "Enter a password:", Mask: '*', }) diff --git a/cli/login_test.go b/cli/login_test.go index f2102177d6710..06f942ee95b9c 100644 --- a/cli/login_test.go +++ b/cli/login_test.go @@ -8,8 +8,6 @@ import ( "github.com/coder/coder/cli/clitest" "github.com/coder/coder/coderd/coderdtest" "github.com/stretchr/testify/require" - - "github.com/Netflix/go-expect" ) func TestLogin(t *testing.T) { @@ -24,12 +22,9 @@ func TestLogin(t *testing.T) { t.Run("InitialUserTTY", func(t *testing.T) { t.Parallel() - console, err := expect.NewConsole(expect.WithStdout(clitest.StdoutLogs(t))) - require.NoError(t, err) client := coderdtest.New(t) root, _ := clitest.New(t, "login", client.URL.String()) - root.SetIn(console.Tty()) - root.SetOut(console.Tty()) + console := clitest.NewConsole(t, root) go func() { err := root.Execute() require.NoError(t, err) @@ -45,12 +40,12 @@ func TestLogin(t *testing.T) { for i := 0; i < len(matches); i += 2 { match := matches[i] value := matches[i+1] - _, 
err = console.ExpectString(match) + _, err := console.ExpectString(match) require.NoError(t, err) _, err = console.SendLine(value) require.NoError(t, err) } - _, err = console.ExpectString("Welcome to Coder") + _, err := console.ExpectString("Welcome to Coder") require.NoError(t, err) }) } diff --git a/cli/projectcreate.go b/cli/projectcreate.go index eb5219cb816cb..4c67201d5a894 100644 --- a/cli/projectcreate.go +++ b/cli/projectcreate.go @@ -3,7 +3,7 @@ package cli import ( "archive/tar" "bytes" - "context" + "errors" "fmt" "io" "os" @@ -12,17 +12,24 @@ import ( "github.com/briandowns/spinner" "github.com/fatih/color" + "github.com/google/uuid" "github.com/manifoldco/promptui" "github.com/spf13/cobra" "golang.org/x/xerrors" "github.com/coder/coder/coderd" + "github.com/coder/coder/coderd/parameter" "github.com/coder/coder/codersdk" "github.com/coder/coder/database" + "github.com/coder/coder/provisionerd" ) func projectCreate() *cobra.Command { - return &cobra.Command{ + var ( + directory string + provisioner string + ) + cmd := &cobra.Command{ Use: "create", Short: "Create a project from the current directory", RunE: func(cmd *cobra.Command, args []string) error { @@ -34,27 +41,24 @@ func projectCreate() *cobra.Command { if err != nil { return err } - - workingDir, err := os.Getwd() - if err != nil { - return err - } - - _, err = runPrompt(cmd, &promptui.Prompt{ + _, err = prompt(cmd, &promptui.Prompt{ Default: "y", IsConfirm: true, - Label: fmt.Sprintf("Set up %s in your organization?", color.New(color.FgHiCyan).Sprintf("%q", workingDir)), + Label: fmt.Sprintf("Set up %s in your organization?", color.New(color.FgHiCyan).Sprintf("%q", directory)), }) if err != nil { + if errors.Is(err, promptui.ErrAbort) { + return nil + } return err } - name, err := runPrompt(cmd, &promptui.Prompt{ - Default: filepath.Base(workingDir), + name, err := prompt(cmd, &promptui.Prompt{ + Default: filepath.Base(directory), Label: "What's your project's name?", Validate: func(s string) error { - _, err = client.Project(cmd.Context(), organization.Name, s) - if err == nil { + project, _ := client.Project(cmd.Context(), organization.Name, s) + if project.ID.String() != uuid.Nil.String() { return xerrors.New("A project already exists with that name!") } return nil @@ -64,49 +68,159 @@ func projectCreate() *cobra.Command { return err } - spin := spinner.New(spinner.CharSets[0], 50*time.Millisecond) - spin.Suffix = " Uploading current directory..." - spin.Start() - defer spin.Stop() - - bytes, err := tarDirectory(workingDir) + job, err := validateProjectVersionSource(cmd, client, organization, database.ProvisionerType(provisioner), directory) if err != nil { return err } - - resp, err := client.UploadFile(cmd.Context(), codersdk.ContentTypeTar, bytes) + project, err := client.CreateProject(cmd.Context(), organization.Name, coderd.CreateProjectRequest{ + Name: name, + VersionImportJobID: job.ID, + }) if err != nil { return err } - job, err := client.CreateProjectVersionImportProvisionerJob(cmd.Context(), organization.Name, coderd.CreateProjectImportJobRequest{ - StorageMethod: database.ProvisionerStorageMethodFile, - StorageSource: resp.Hash, - Provisioner: database.ProvisionerTypeTerraform, - // SkipResources on first import to detect variables defined by the project. 
- SkipResources: true, + _, err = prompt(cmd, &promptui.Prompt{ + Label: "Create project?", + IsConfirm: true, + Default: "y", }) if err != nil { + if errors.Is(err, promptui.ErrAbort) { + return nil + } return err } - spin.Stop() - logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), organization.Name, job.ID, time.Time{}) + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s The %s project has been created!\n", color.HiBlackString(">"), color.HiCyanString(project.Name)) + _, err = prompt(cmd, &promptui.Prompt{ + Label: "Create a new workspace?", + IsConfirm: true, + Default: "y", + }) if err != nil { - return err - } - for { - log, ok := <-logs - if !ok { - break + if errors.Is(err, promptui.ErrAbort) { + return nil } - _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s %s\n", color.HiGreenString("[parse]"), log.Output) + return err } - _, _ = fmt.Fprintf(cmd.OutOrStdout(), "Create project %q!\n", name) return nil }, } + currentDirectory, _ := os.Getwd() + cmd.Flags().StringVarP(&directory, "directory", "d", currentDirectory, "Specify the directory to create from") + cmd.Flags().StringVarP(&provisioner, "provisioner", "p", "terraform", "Customize the provisioner backend") + // This is for testing! + err := cmd.Flags().MarkHidden("provisioner") + if err != nil { + panic(err) + } + return cmd +} + +func validateProjectVersionSource(cmd *cobra.Command, client *codersdk.Client, organization coderd.Organization, provisioner database.ProvisionerType, directory string, parameters ...coderd.CreateParameterValueRequest) (*coderd.ProvisionerJob, error) { + spin := spinner.New(spinner.CharSets[5], 100*time.Millisecond) + spin.Writer = cmd.OutOrStdout() + spin.Suffix = " Uploading current directory..." + err := spin.Color("fgHiGreen") + if err != nil { + return nil, err + } + spin.Start() + defer spin.Stop() + + tarData, err := tarDirectory(directory) + if err != nil { + return nil, err + } + resp, err := client.UploadFile(cmd.Context(), codersdk.ContentTypeTar, tarData) + if err != nil { + return nil, err + } + + before := time.Now() + job, err := client.CreateProjectImportJob(cmd.Context(), organization.Name, coderd.CreateProjectImportJobRequest{ + StorageMethod: database.ProvisionerStorageMethodFile, + StorageSource: resp.Hash, + Provisioner: provisioner, + ParameterValues: parameters, + }) + if err != nil { + return nil, err + } + spin.Suffix = " Waiting for the import to complete..." 
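+ // Logs are streamed while the job runs but only buffered here; they are
+ // replayed further down when the job fails, so a successful import stays quiet.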
+ logs, err := client.ProjectImportJobLogsAfter(cmd.Context(), organization.Name, job.ID, before) + if err != nil { + return nil, err + } + logBuffer := make([]coderd.ProvisionerJobLog, 0, 64) + for { + log, ok := <-logs + if !ok { + break + } + logBuffer = append(logBuffer, log) + } + + job, err = client.ProjectImportJob(cmd.Context(), organization.Name, job.ID) + if err != nil { + return nil, err + } + parameterSchemas, err := client.ProjectImportJobSchemas(cmd.Context(), organization.Name, job.ID) + if err != nil { + return nil, err + } + parameterValues, err := client.ProjectImportJobParameters(cmd.Context(), organization.Name, job.ID) + if err != nil { + return nil, err + } + spin.Stop() + + if provisionerd.IsMissingParameterError(job.Error) { + valuesBySchemaID := map[string]coderd.ComputedParameterValue{} + for _, parameterValue := range parameterValues { + valuesBySchemaID[parameterValue.SchemaID.String()] = parameterValue + } + for _, parameterSchema := range parameterSchemas { + _, ok := valuesBySchemaID[parameterSchema.ID.String()] + if ok { + continue + } + if parameterSchema.Name == parameter.CoderWorkspaceTransition { + continue + } + value, err := prompt(cmd, &promptui.Prompt{ + Label: fmt.Sprintf("Enter value for %s:", color.HiCyanString(parameterSchema.Name)), + }) + if err != nil { + return nil, err + } + parameters = append(parameters, coderd.CreateParameterValueRequest{ + Name: parameterSchema.Name, + SourceValue: value, + SourceScheme: database.ParameterSourceSchemeData, + DestinationScheme: parameterSchema.DefaultDestinationScheme, + }) + } + return validateProjectVersionSource(cmd, client, organization, provisioner, directory, parameters...) + } + + if job.Status != coderd.ProvisionerJobStatusSucceeded { + for _, log := range logBuffer { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s %s\n", color.HiGreenString("[tf]"), log.Output) + } + + return nil, xerrors.New(job.Error) + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s Successfully imported project source!\n", color.HiGreenString("✓")) + + resources, err := client.ProjectImportJobResources(cmd.Context(), organization.Name, job.ID) + if err != nil { + return nil, err + } + return &job, displayProjectImportInfo(cmd, parameterSchemas, parameterValues, resources) } func tarDirectory(directory string) ([]byte, error) { diff --git a/cli/projectcreate_test.go b/cli/projectcreate_test.go new file mode 100644 index 0000000000000..ed802475ffe94 --- /dev/null +++ b/cli/projectcreate_test.go @@ -0,0 +1,102 @@ +//go:build !windows + +package cli_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/cli/clitest" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/database" + "github.com/coder/coder/provisioner/echo" + "github.com/coder/coder/provisionersdk/proto" +) + +func TestProjectCreate(t *testing.T) { + t.Parallel() + t.Run("NoParameters", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + coderdtest.CreateInitialUser(t, client) + source := clitest.CreateProjectVersionSource(t, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: echo.ProvisionComplete, + }) + cmd, root := clitest.New(t, "projects", "create", "--directory", source, "--provisioner", string(database.ProvisionerTypeEcho)) + clitest.SetupConfig(t, client, root) + _ = coderdtest.NewProvisionerDaemon(t, client) + console := clitest.NewConsole(t, cmd) + closeChan := make(chan struct{}) + go func() { + err := cmd.Execute() + require.NoError(t, err) + close(closeChan) + }() + + 
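+ // matches pairs each expected prompt substring with the line to send back
+ // through the pseudo-terminal: expect, respond, repeat.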
matches := []string{ + "organization?", "y", + "name?", "test-project", + "project?", "y", + "created!", "n", + } + for i := 0; i < len(matches); i += 2 { + match := matches[i] + value := matches[i+1] + _, err := console.ExpectString(match) + require.NoError(t, err) + _, err = console.SendLine(value) + require.NoError(t, err) + } + <-closeChan + }) + + t.Run("Parameter", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + coderdtest.CreateInitialUser(t, client) + source := clitest.CreateProjectVersionSource(t, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "somevar", + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + cmd, root := clitest.New(t, "projects", "create", "--directory", source, "--provisioner", string(database.ProvisionerTypeEcho)) + clitest.SetupConfig(t, client, root) + coderdtest.NewProvisionerDaemon(t, client) + console := clitest.NewConsole(t, cmd) + closeChan := make(chan struct{}) + go func() { + err := cmd.Execute() + require.NoError(t, err) + close(closeChan) + }() + + matches := []string{ + "organization?", "y", + "name?", "test-project", + "somevar:", "value", + "project?", "y", + "created!", "n", + } + for i := 0; i < len(matches); i += 2 { + match := matches[i] + value := matches[i+1] + _, err := console.ExpectString(match) + require.NoError(t, err) + _, err = console.SendLine(value) + require.NoError(t, err) + } + <-closeChan + }) +} diff --git a/cli/projects.go b/cli/projects.go index 07d68c0155967..bce9930bd21ca 100644 --- a/cli/projects.go +++ b/cli/projects.go @@ -1,14 +1,22 @@ package cli import ( + "fmt" + "strings" + "github.com/fatih/color" "github.com/spf13/cobra" + "github.com/xlab/treeprint" + "golang.org/x/xerrors" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/database" ) func projects() *cobra.Command { cmd := &cobra.Command{ - Use: "projects", - Long: "Testing something", + Use: "projects", + Aliases: []string{"project"}, Example: ` - Create a project for developers to create workspaces @@ -28,3 +36,46 @@ func projects() *cobra.Command { return cmd } + +func displayProjectImportInfo(cmd *cobra.Command, parameterSchemas []coderd.ParameterSchema, parameterValues []coderd.ComputedParameterValue, resources []coderd.ProjectImportJobResource) error { + schemaByID := map[string]coderd.ParameterSchema{} + for _, schema := range parameterSchemas { + schemaByID[schema.ID.String()] = schema + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "\n %s\n\n", color.HiBlackString("Parameters")) + for _, value := range parameterValues { + schema, ok := schemaByID[value.SchemaID.String()] + if !ok { + return xerrors.Errorf("schema not found: %s", value.Name) + } + displayValue := value.SourceValue + if !schema.RedisplayValue { + displayValue = "" + } + output := fmt.Sprintf("%s %s %s", color.HiCyanString(value.Name), color.HiBlackString("="), displayValue) + if value.DefaultSourceValue { + output += " (default value)" + } else if value.Scope != database.ParameterScopeImportJob { + output += fmt.Sprintf(" (inherited from %s)", value.Scope) + } + + root := treeprint.NewWithRoot(output) + if schema.Description != "" { + root.AddBranch(fmt.Sprintf("%s\n%s", color.HiBlackString("Description"), schema.Description)) + } + if schema.AllowOverrideSource { + root.AddBranch(fmt.Sprintf("%s Users 
can customize this value!", color.HiYellowString("+"))) + } + _, _ = fmt.Fprintln(cmd.OutOrStdout(), " "+strings.Join(strings.Split(root.String(), "\n"), "\n ")) + } + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " %s\n\n", color.HiBlackString("Resources")) + for _, resource := range resources { + transition := color.HiGreenString("start") + if resource.Transition == database.WorkspaceTransitionStop { + transition = color.HiRedString("stop") + } + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " %s %s on %s\n\n", color.HiCyanString(resource.Type), color.HiCyanString(resource.Name), transition) + } + return nil +} diff --git a/cli/root.go b/cli/root.go index 85db65385291a..9133d7655d133 100644 --- a/cli/root.go +++ b/cli/root.go @@ -69,6 +69,8 @@ func Root() *cobra.Command { return cmd } +// createClient returns a new client from the command context. +// The configuration directory will be read from the global flag. func createClient(cmd *cobra.Command) (*codersdk.Client, error) { root := createConfig(cmd) rawURL, err := root.URL().Read() @@ -84,9 +86,11 @@ func createClient(cmd *cobra.Command) (*codersdk.Client, error) { return nil, err } client := codersdk.New(serverURL) - return client, client.SetSessionToken(token) + client.SessionToken = token + return client, nil } +// currentOrganization returns the currently active organization for the authenticated user. func currentOrganization(cmd *cobra.Command, client *codersdk.Client) (coderd.Organization, error) { orgs, err := client.UserOrganizations(cmd.Context(), "me") if err != nil { @@ -97,6 +101,7 @@ func currentOrganization(cmd *cobra.Command, client *codersdk.Client) (coderd.Or return orgs[0], nil } +// createConfig consumes the global configuration flag to produce a config root. func createConfig(cmd *cobra.Command) config.Root { globalRoot, err := cmd.Flags().GetString(varGlobalConfig) if err != nil { @@ -116,7 +121,7 @@ func isTTY(reader io.Reader) bool { return isatty.IsTerminal(file.Fd()) } -func runPrompt(cmd *cobra.Command, prompt *promptui.Prompt) (string, error) { +func prompt(cmd *cobra.Command, prompt *promptui.Prompt) (string, error) { var ok bool prompt.Stdin, ok = cmd.InOrStdin().(io.ReadCloser) if !ok { @@ -161,6 +166,16 @@ func runPrompt(cmd *cobra.Command, prompt *promptui.Prompt) (string, error) { Invalid: invalid, Valid: valid, } + oldValidate := prompt.Validate + if oldValidate != nil { + // Override the validate function to pass our default! 
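+ // promptui validates the raw input, which is empty when the user simply
+ // accepts the default, so substitute defaultValue before validating.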
+ prompt.Validate = func(s string) error { + if s == "" { + s = defaultValue + } + return oldValidate(s) + } + } value, err := prompt.Run() if value == "" && !prompt.IsConfirm { value = defaultValue diff --git a/cli/workspacecreate.go b/cli/workspacecreate.go new file mode 100644 index 0000000000000..be883ebaffe00 --- /dev/null +++ b/cli/workspacecreate.go @@ -0,0 +1,131 @@ +package cli + +import ( + "errors" + "fmt" + "time" + + "github.com/fatih/color" + "github.com/google/uuid" + "github.com/manifoldco/promptui" + "github.com/spf13/cobra" + "golang.org/x/xerrors" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/database" +) + +func workspaceCreate() *cobra.Command { + cmd := &cobra.Command{ + Use: "create [name]", + Short: "Create a workspace from a project", + RunE: func(cmd *cobra.Command, args []string) error { + client, err := createClient(cmd) + if err != nil { + return err + } + organization, err := currentOrganization(cmd, client) + if err != nil { + return err + } + + var name string + if len(args) >= 2 { + name = args[1] + } else { + name, err = prompt(cmd, &promptui.Prompt{ + Label: "What's your workspace's name?", + Validate: func(s string) error { + if s == "" { + return xerrors.Errorf("You must provide a name!") + } + workspace, _ := client.Workspace(cmd.Context(), "", s) + if workspace.ID.String() != uuid.Nil.String() { + return xerrors.New("A workspace already exists with that name!") + } + return nil + }, + }) + if err != nil { + if errors.Is(err, promptui.ErrAbort) { + return nil + } + return err + } + } + + _, _ = fmt.Fprintf(cmd.OutOrStdout(), "%s Previewing project create...\n", color.HiBlackString(">")) + + project, err := client.Project(cmd.Context(), organization.Name, args[0]) + if err != nil { + return err + } + projectVersion, err := client.ProjectVersion(cmd.Context(), organization.Name, project.Name, project.ActiveVersionID.String()) + if err != nil { + return err + } + parameterSchemas, err := client.ProjectImportJobSchemas(cmd.Context(), organization.Name, projectVersion.ImportJobID) + if err != nil { + return err + } + parameterValues, err := client.ProjectImportJobParameters(cmd.Context(), organization.Name, projectVersion.ImportJobID) + if err != nil { + return err + } + resources, err := client.ProjectImportJobResources(cmd.Context(), organization.Name, projectVersion.ImportJobID) + if err != nil { + return err + } + err = displayProjectImportInfo(cmd, parameterSchemas, parameterValues, resources) + if err != nil { + return err + } + + _, err = prompt(cmd, &promptui.Prompt{ + Label: fmt.Sprintf("Create workspace %s?", color.HiCyanString(name)), + Default: "y", + IsConfirm: true, + }) + if err != nil { + if errors.Is(err, promptui.ErrAbort) { + return nil + } + return err + } + + workspace, err := client.CreateWorkspace(cmd.Context(), "", coderd.CreateWorkspaceRequest{ + ProjectID: project.ID, + Name: name, + }) + if err != nil { + return err + } + history, err := client.CreateWorkspaceHistory(cmd.Context(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectVersionID: projectVersion.ID, + Transition: database.WorkspaceTransitionStart, + }) + if err != nil { + return err + } + + logs, err := client.WorkspaceProvisionJobLogsAfter(cmd.Context(), organization.Name, history.ProvisionJobID, time.Time{}) + if err != nil { + return err + } + for { + log, ok := <-logs + if !ok { + break + } + _, _ = fmt.Printf("Terraform: %s\n", log.Output) + } + + // This command is WIP, and output will change! 
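+ // Note: this writes via fmt.Printf (process stdout) rather than
+ // cmd.OutOrStdout(), so the message bypasses the test console.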
+ + _, _ = fmt.Printf("Created workspace! %s\n", name) + return nil + }, + } + + return cmd +} diff --git a/cli/workspacecreate_test.go b/cli/workspacecreate_test.go new file mode 100644 index 0000000000000..138e0ee1e61d6 --- /dev/null +++ b/cli/workspacecreate_test.go @@ -0,0 +1,64 @@ +//go:build !windows + +package cli_test + +import ( + "testing" + + "github.com/coder/coder/cli/clitest" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/provisioner/echo" + "github.com/coder/coder/provisionersdk/proto" + "github.com/stretchr/testify/require" +) + +func TestWorkspaceCreate(t *testing.T) { + t.Parallel() + t.Run("Create", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{ + Resources: []*proto.Resource{{ + Name: "example", + Type: "aws_instance", + }}, + }, + }, + }}, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + project := coderdtest.CreateProject(t, client, user.Organization, job.ID) + cmd, root := clitest.New(t, "workspaces", "create", project.Name) + clitest.SetupConfig(t, client, root) + + console := clitest.NewConsole(t, cmd) + closeChan := make(chan struct{}) + go func() { + err := cmd.Execute() + require.NoError(t, err) + close(closeChan) + }() + + matches := []string{ + "name?", "workspace-name", + "Create workspace", "y", + } + for i := 0; i < len(matches); i += 2 { + match := matches[i] + value := matches[i+1] + _, err := console.ExpectString(match) + require.NoError(t, err) + _, err = console.SendLine(value) + require.NoError(t, err) + } + _, err := console.ExpectString("Create") + require.NoError(t, err) + <-closeChan + }) +} diff --git a/cli/workspaces.go b/cli/workspaces.go index 4140d8c9ed7a2..d405f00cea88b 100644 --- a/cli/workspaces.go +++ b/cli/workspaces.go @@ -6,6 +6,7 @@ func workspaces() *cobra.Command { cmd := &cobra.Command{ Use: "workspaces", } + cmd.AddCommand(workspaceCreate()) return cmd } diff --git a/coderd/coderd.go b/coderd/coderd.go index fcc6530c6811c..9669dbf92c2e1 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -73,7 +73,6 @@ func New(options *Options) http.Handler { r.Route("/{projectversion}", func(r chi.Router) { r.Use(httpmw.ExtractProjectVersionParam(api.Database)) r.Get("/", api.projectVersionByOrganizationAndName) - r.Get("/parameters", api.projectVersionParametersByOrganizationAndName) }) }) }) @@ -108,24 +107,38 @@ func New(options *Options) http.Handler { r.Post("/", api.postFiles) }) - r.Route("/provisioners", func(r chi.Router) { - r.Route("/daemons", func(r chi.Router) { - r.Get("/", api.provisionerDaemons) - r.Get("/serve", api.provisionerDaemonsServe) + r.Route("/projectimport/{organization}", func(r chi.Router) { + r.Use( + httpmw.ExtractAPIKey(options.Database, nil), + httpmw.ExtractOrganizationParam(options.Database), + ) + r.Post("/", api.postProjectImportByOrganization) + r.Route("/{provisionerjob}", func(r chi.Router) { + r.Use(httpmw.ExtractProvisionerJobParam(options.Database)) + r.Get("/", api.provisionerJobByID) + r.Get("/schemas", api.projectImportJobSchemasByID) + r.Get("/parameters", api.projectImportJobParametersByID) + r.Get("/resources", api.projectImportJobResourcesByID) + r.Get("/logs", 
api.provisionerJobLogsByID) }) - r.Route("/jobs/{organization}", func(r chi.Router) { - r.Use( - httpmw.ExtractAPIKey(options.Database, nil), - httpmw.ExtractOrganizationParam(options.Database), - ) - r.Post("/import", api.postProvisionerImportJobByOrganization) - r.Route("/{provisionerjob}", func(r chi.Router) { - r.Use(httpmw.ExtractProvisionerJobParam(options.Database)) - r.Get("/", api.provisionerJobByOrganization) - r.Get("/logs", api.provisionerJobLogsByID) - }) + }) + + r.Route("/workspaceprovision/{organization}", func(r chi.Router) { + r.Use( + httpmw.ExtractAPIKey(options.Database, nil), + httpmw.ExtractOrganizationParam(options.Database), + ) + r.Route("/{provisionerjob}", func(r chi.Router) { + r.Use(httpmw.ExtractProvisionerJobParam(options.Database)) + r.Get("/", api.provisionerJobByID) + r.Get("/logs", api.provisionerJobLogsByID) }) }) + + r.Route("/provisioners/daemons", func(r chi.Router) { + r.Get("/", api.provisionerDaemons) + r.Get("/serve", api.provisionerDaemonsServe) + }) }) r.NotFound(site.Handler(options.Logger).ServeHTTP) return r diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 82295ef35af9b..dc7f782c83748 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -28,8 +28,8 @@ import ( "github.com/coder/coder/provisionersdk/proto" ) -// New constructs a new coderd test instance. This returned Server -// should contain no side-effects. +// New constructs an in-memory coderd instance and returns +// the connected client. func New(t *testing.T) *codersdk.Client { // This can be hotswapped for a live database instance. db := databasefake.New() @@ -117,20 +117,19 @@ func CreateInitialUser(t *testing.T, client *codersdk.Client) coderd.CreateIniti Password: req.Password, }) require.NoError(t, err) - err = client.SetSessionToken(login.SessionToken) - require.NoError(t, err) + client.SessionToken = login.SessionToken return req } -// CreateProjectImportProvisionerJob creates a project import provisioner job +// CreateProjectImportJob creates a project import provisioner job // with the responses provided. It uses the "echo" provisioner for compatibility // with testing. -func CreateProjectImportProvisionerJob(t *testing.T, client *codersdk.Client, organization string, res *echo.Responses) coderd.ProvisionerJob { +func CreateProjectImportJob(t *testing.T, client *codersdk.Client, organization string, res *echo.Responses) coderd.ProvisionerJob { data, err := echo.Tar(res) require.NoError(t, err) file, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data) require.NoError(t, err) - job, err := client.CreateProjectVersionImportProvisionerJob(context.Background(), organization, coderd.CreateProjectImportJobRequest{ + job, err := client.CreateProjectImportJob(context.Background(), organization, coderd.CreateProjectImportJobRequest{ StorageSource: file.Hash, StorageMethod: database.ProvisionerStorageMethodFile, Provisioner: database.ProvisionerTypeEcho, @@ -150,12 +149,24 @@ func CreateProject(t *testing.T, client *codersdk.Client, organization string, j return project } -// AwaitProvisionerJob awaits for a job to reach completed status. -func AwaitProvisionerJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob { +// AwaitProjectImportJob awaits for an import job to reach completed status. 
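+// It polls with require.Eventually (3s timeout, 25ms interval) until the job
+// reports a completed status.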
+func AwaitProjectImportJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob { + var provisionerJob coderd.ProvisionerJob + require.Eventually(t, func() bool { + var err error + provisionerJob, err = client.ProjectImportJob(context.Background(), organization, job) + require.NoError(t, err) + return provisionerJob.Status.Completed() + }, 3*time.Second, 25*time.Millisecond) + return provisionerJob +} + +// AwaitWorkspaceProvisionJob awaits for a workspace provision job to reach completed status. +func AwaitWorkspaceProvisionJob(t *testing.T, client *codersdk.Client, organization string, job uuid.UUID) coderd.ProvisionerJob { var provisionerJob coderd.ProvisionerJob require.Eventually(t, func() bool { var err error - provisionerJob, err = client.ProvisionerJob(context.Background(), organization, job) + provisionerJob, err = client.WorkspaceProvisionJob(context.Background(), organization, job) require.NoError(t, err) return provisionerJob.Status.Completed() }, 3*time.Second, 25*time.Millisecond) diff --git a/coderd/coderdtest/coderdtest_test.go b/coderd/coderdtest/coderdtest_test.go index b388ea6428f43..f351bb4a4d5a4 100644 --- a/coderd/coderdtest/coderdtest_test.go +++ b/coderd/coderdtest/coderdtest_test.go @@ -22,8 +22,8 @@ func TestNew(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) closer := coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) history, err := client.CreateWorkspaceHistory(context.Background(), "me", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ @@ -31,6 +31,6 @@ func TestNew(t *testing.T) { Transition: database.WorkspaceTransitionStart, }) require.NoError(t, err) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID) + coderdtest.AwaitWorkspaceProvisionJob(t, client, user.Organization, history.ProvisionJobID) closer.Close() } diff --git a/coderd/files.go b/coderd/files.go index 01337b9d2e143..069509c567ca6 100644 --- a/coderd/files.go +++ b/coderd/files.go @@ -40,8 +40,18 @@ func (api *api) postFiles(rw http.ResponseWriter, r *http.Request) { return } hashBytes := sha256.Sum256(data) - file, err := api.Database.InsertFile(r.Context(), database.InsertFileParams{ - Hash: hex.EncodeToString(hashBytes[:]), + hash := hex.EncodeToString(hashBytes[:]) + file, err := api.Database.GetFileByHash(r.Context(), hash) + if err == nil { + // The file already exists! 
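+ // Files are addressed by the SHA-256 hash of their contents, so re-uploading
+ // identical data is idempotent: return the stored record instead of inserting
+ // a duplicate row.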
+ render.Status(r, http.StatusOK) + render.JSON(rw, r, UploadFileResponse{ + Hash: file.Hash, + }) + return + } + file, err = api.Database.InsertFile(r.Context(), database.InsertFileParams{ + Hash: hash, CreatedBy: apiKey.UserID, CreatedAt: database.Now(), Mimetype: contentType, diff --git a/coderd/files_test.go b/coderd/files_test.go index 2ffa455df7e81..d14c1aff99fed 100644 --- a/coderd/files_test.go +++ b/coderd/files_test.go @@ -27,4 +27,15 @@ func TestPostFiles(t *testing.T) { _, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, make([]byte, 1024)) require.NoError(t, err) }) + + t.Run("InsertAlreadyExists", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _ = coderdtest.CreateInitialUser(t, client) + data := make([]byte, 1024) + _, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data) + require.NoError(t, err) + _, err = client.UploadFile(context.Background(), codersdk.ContentTypeTar, data) + require.NoError(t, err) + }) } diff --git a/coderd/parameter/compute.go b/coderd/parameter/compute.go new file mode 100644 index 0000000000000..d7c68a5ac30e1 --- /dev/null +++ b/coderd/parameter/compute.go @@ -0,0 +1,215 @@ +package parameter + +import ( + "context" + "database/sql" + "errors" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/database" +) + +const ( + CoderUsername = "coder_username" + CoderWorkspaceTransition = "coder_workspace_transition" +) + +// ComputeScope targets identifiers to pull parameters from. +type ComputeScope struct { + ProjectImportJobID uuid.UUID + OrganizationID string + UserID string + ProjectID uuid.NullUUID + WorkspaceID uuid.NullUUID +} + +type ComputeOptions struct { + // HideRedisplayValues removes the value from parameters that + // come from schemas with RedisplayValue set to false. + HideRedisplayValues bool +} + +// ComputedValue represents a computed parameter value. +type ComputedValue struct { + database.ParameterValue + SchemaID uuid.UUID `json:"schema_id"` + DefaultSourceValue bool `json:"default_source_value"` +} + +// Compute accepts a scope in which parameter values are sourced. +// These sources are iterated in a hierarchical fashion to determine +// the runtime parameter values for schemas provided. +func Compute(ctx context.Context, db database.Store, scope ComputeScope, options *ComputeOptions) ([]ComputedValue, error) { + if options == nil { + options = &ComputeOptions{} + } + compute := &compute{ + options: options, + db: db, + computedParameterByName: map[string]ComputedValue{}, + parameterSchemasByName: map[string]database.ParameterSchema{}, + } + + // All parameters for the import job ID! + parameterSchemas, err := db.GetParameterSchemasByJobID(ctx, scope.ProjectImportJobID) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return nil, xerrors.Errorf("get project parameters: %w", err) + } + for _, parameterSchema := range parameterSchemas { + compute.parameterSchemasByName[parameterSchema.Name] = parameterSchema + } + + // Organization parameters come first! + err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeOrganization, + ScopeID: scope.OrganizationID, + }) + if err != nil { + return nil, err + } + + // Job parameters come second! 
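+ // Overall precedence: organization -> import job -> project defaults -> project ->
+ // user -> workspace. Later scopes overwrite earlier ones, except that user and
+ // workspace values cannot override an existing value unless the schema sets
+ // AllowOverrideSource (see injectSingle).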
+ err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeImportJob, + ScopeID: scope.ProjectImportJobID.String(), + }) + if err != nil { + return nil, err + } + + // Default project parameter values come second! + for _, parameterSchema := range parameterSchemas { + if parameterSchema.DefaultSourceScheme == database.ParameterSourceSchemeNone { + continue + } + if _, ok := compute.computedParameterByName[parameterSchema.Name]; ok { + // We already have a value! No need to use the default. + continue + } + + switch parameterSchema.DefaultSourceScheme { + case database.ParameterSourceSchemeData: + // Inject a default value scoped to the import job ID. + // This doesn't need to be inserted into the database, + // because it's a dynamic value associated with the schema. + err = compute.injectSingle(database.ParameterValue{ + ID: uuid.New(), + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + SourceScheme: database.ParameterSourceSchemeData, + Name: parameterSchema.Name, + DestinationScheme: parameterSchema.DefaultDestinationScheme, + SourceValue: parameterSchema.DefaultSourceValue, + Scope: database.ParameterScopeImportJob, + ScopeID: scope.ProjectImportJobID.String(), + }, true) + if err != nil { + return nil, xerrors.Errorf("insert default value: %w", err) + } + default: + return nil, xerrors.Errorf("unsupported source scheme for project version parameter %q: %q", parameterSchema.Name, string(parameterSchema.DefaultSourceScheme)) + } + } + + if scope.ProjectID.Valid { + // Project parameters come third! + err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeProject, + ScopeID: scope.ProjectID.UUID.String(), + }) + if err != nil { + return nil, err + } + } + + // User parameters come fourth! + err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeUser, + ScopeID: scope.UserID, + }) + if err != nil { + return nil, err + } + + if scope.WorkspaceID.Valid { + // Workspace parameters come last! + err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.UUID.String(), + }) + if err != nil { + return nil, err + } + } + + values := make([]ComputedValue, 0, len(compute.computedParameterByName)) + for _, value := range compute.computedParameterByName { + values = append(values, value) + } + return values, nil +} + +type compute struct { + options *ComputeOptions + db database.Store + computedParameterByName map[string]ComputedValue + parameterSchemasByName map[string]database.ParameterSchema +} + +// Validates and computes the value for parameters; setting the value on "parameterByName". 
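+// A missing row set (sql.ErrNoRows) is treated as "no values at this scope"
+// rather than an error, so scopes without parameters are skipped silently.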
+func (c *compute) injectScope(ctx context.Context, scopeParams database.GetParameterValuesByScopeParams) error { + scopedParameters, err := c.db.GetParameterValuesByScope(ctx, scopeParams) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return xerrors.Errorf("get %s parameters: %w", scopeParams.Scope, err) + } + + for _, scopedParameter := range scopedParameters { + err = c.injectSingle(scopedParameter, false) + if err != nil { + return xerrors.Errorf("inject single %q: %w", scopedParameter.Name, err) + } + } + return nil +} + +func (c *compute) injectSingle(scopedParameter database.ParameterValue, defaultValue bool) error { + parameterSchema, hasParameterSchema := c.parameterSchemasByName[scopedParameter.Name] + if !hasParameterSchema { + // Don't inject parameters that aren't defined by the project. + return nil + } + + _, hasParameterValue := c.computedParameterByName[scopedParameter.Name] + if hasParameterValue { + if !parameterSchema.AllowOverrideSource && + // Users and workspaces cannot override anything on a project! + (scopedParameter.Scope == database.ParameterScopeUser || + scopedParameter.Scope == database.ParameterScopeWorkspace) { + return nil + } + } + + switch scopedParameter.SourceScheme { + case database.ParameterSourceSchemeData: + value := ComputedValue{ + ParameterValue: scopedParameter, + SchemaID: parameterSchema.ID, + DefaultSourceValue: defaultValue, + } + if c.options.HideRedisplayValues && !parameterSchema.RedisplayValue { + value.SourceValue = "" + } + c.computedParameterByName[scopedParameter.Name] = value + default: + return xerrors.Errorf("unsupported source scheme: %q", string(parameterSchema.DefaultSourceScheme)) + } + return nil +} diff --git a/coderd/parameter/compute_test.go b/coderd/parameter/compute_test.go new file mode 100644 index 0000000000000..660be359975a5 --- /dev/null +++ b/coderd/parameter/compute_test.go @@ -0,0 +1,222 @@ +package parameter_test + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/coderd/parameter" + "github.com/coder/coder/cryptorand" + "github.com/coder/coder/database" + "github.com/coder/coder/database/databasefake" +) + +func TestCompute(t *testing.T) { + t.Parallel() + generateScope := func() parameter.ComputeScope { + return parameter.ComputeScope{ + ProjectImportJobID: uuid.New(), + OrganizationID: uuid.NewString(), + ProjectID: uuid.NullUUID{ + UUID: uuid.New(), + Valid: true, + }, + WorkspaceID: uuid.NullUUID{ + UUID: uuid.New(), + Valid: true, + }, + UserID: uuid.NewString(), + } + } + type parameterOptions struct { + AllowOverrideSource bool + AllowOverrideDestination bool + DefaultDestinationScheme database.ParameterDestinationScheme + ProjectImportJobID uuid.UUID + } + generateParameter := func(t *testing.T, db database.Store, opts parameterOptions) database.ParameterSchema { + if opts.DefaultDestinationScheme == "" { + opts.DefaultDestinationScheme = database.ParameterDestinationSchemeEnvironmentVariable + } + name, err := cryptorand.String(8) + require.NoError(t, err) + sourceValue, err := cryptorand.String(8) + require.NoError(t, err) + param, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{ + ID: uuid.New(), + Name: name, + JobID: opts.ProjectImportJobID, + DefaultSourceScheme: database.ParameterSourceSchemeData, + DefaultSourceValue: sourceValue, + AllowOverrideSource: opts.AllowOverrideSource, + AllowOverrideDestination: opts.AllowOverrideDestination, + 
DefaultDestinationScheme: opts.DefaultDestinationScheme, + }) + require.NoError(t, err) + return param + } + + t.Run("NoValue", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + _, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{ + ID: uuid.New(), + JobID: scope.ProjectImportJobID, + Name: "hey", + DefaultSourceScheme: database.ParameterSourceSchemeNone, + }) + require.NoError(t, err) + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 0) + }) + + t.Run("UseDefaultProjectValue", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameterSchema := generateParameter(t, db, parameterOptions{ + ProjectImportJobID: scope.ProjectImportJobID, + DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable, + }) + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 1) + computedValue := computed[0] + require.True(t, computedValue.DefaultSourceValue) + require.Equal(t, database.ParameterScopeImportJob, computedValue.Scope) + require.Equal(t, scope.ProjectImportJobID.String(), computedValue.ScopeID) + require.Equal(t, computedValue.SourceValue, parameterSchema.DefaultSourceValue) + }) + + t.Run("OverrideOrganizationWithImportJob", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameterSchema := generateParameter(t, db, parameterOptions{ + ProjectImportJobID: scope.ProjectImportJobID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterSchema.Name, + Scope: database.ParameterScopeOrganization, + ScopeID: scope.OrganizationID, + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "firstnop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + }) + require.NoError(t, err) + + value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterSchema.Name, + Scope: database.ParameterScopeImportJob, + ScopeID: scope.ProjectImportJobID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "secondnop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + }) + require.NoError(t, err) + + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 1) + require.Equal(t, false, computed[0].DefaultSourceValue) + require.Equal(t, value.SourceValue, computed[0].SourceValue) + }) + + t.Run("ProjectOverridesProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameterSchema := generateParameter(t, db, parameterOptions{ + ProjectImportJobID: scope.ProjectImportJobID, + }) + value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterSchema.Name, + Scope: database.ParameterScopeProject, + ScopeID: scope.ProjectID.UUID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + }) + require.NoError(t, err) + + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 1) + require.Equal(t, 
false, computed[0].DefaultSourceValue) + require.Equal(t, value.SourceValue, computed[0].SourceValue) + }) + + t.Run("WorkspaceCannotOverwriteProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameterSchema := generateParameter(t, db, parameterOptions{ + ProjectImportJobID: scope.ProjectImportJobID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterSchema.Name, + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.UUID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + }) + require.NoError(t, err) + + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 1) + require.Equal(t, true, computed[0].DefaultSourceValue) + }) + + t.Run("WorkspaceOverwriteProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameterSchema := generateParameter(t, db, parameterOptions{ + AllowOverrideSource: true, + ProjectImportJobID: scope.ProjectImportJobID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterSchema.Name, + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.UUID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + }) + require.NoError(t, err) + + computed, err := parameter.Compute(context.Background(), db, scope, nil) + require.NoError(t, err) + require.Len(t, computed, 1) + require.Equal(t, false, computed[0].DefaultSourceValue) + }) + + t.Run("HideRedisplay", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + _ = generateParameter(t, db, parameterOptions{ + ProjectImportJobID: scope.ProjectImportJobID, + DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable, + }) + computed, err := parameter.Compute(context.Background(), db, scope, ¶meter.ComputeOptions{ + HideRedisplayValues: true, + }) + require.NoError(t, err) + require.Len(t, computed, 1) + computedValue := computed[0] + require.True(t, computedValue.DefaultSourceValue) + require.Equal(t, computedValue.SourceValue, "") + }) +} diff --git a/coderd/parameters.go b/coderd/parameters.go deleted file mode 100644 index b3aa1d4967bd9..0000000000000 --- a/coderd/parameters.go +++ /dev/null @@ -1,107 +0,0 @@ -package coderd - -import ( - "database/sql" - "errors" - "fmt" - "net/http" - "time" - - "github.com/go-chi/render" - "github.com/google/uuid" - - "github.com/coder/coder/database" - "github.com/coder/coder/httpapi" -) - -// CreateParameterValueRequest is used to create a new parameter value for a scope. -type CreateParameterValueRequest struct { - Name string `json:"name" validate:"required"` - SourceValue string `json:"source_value" validate:"required"` - SourceScheme database.ParameterSourceScheme `json:"source_scheme" validate:"oneof=data,required"` - DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme" validate:"oneof=environment_variable provisioner_variable,required"` - DestinationValue string `json:"destination_value" validate:"required"` -} - -// ParameterValue represents a set value for the scope. 
-type ParameterValue struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` - Scope database.ParameterScope `json:"scope"` - ScopeID string `json:"scope_id"` - SourceScheme database.ParameterSourceScheme `json:"source_scheme"` - DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme"` - DestinationValue string `json:"destination_value"` -} - -// Abstracts creating parameters into a single request/response format. -// Callers are in charge of validating the requester has permissions to -// perform the creation. -func postParameterValueForScope(rw http.ResponseWriter, r *http.Request, db database.Store, scope database.ParameterScope, scopeID string) { - var createRequest CreateParameterValueRequest - if !httpapi.Read(rw, r, &createRequest) { - return - } - parameterValue, err := db.InsertParameterValue(r.Context(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: createRequest.Name, - CreatedAt: database.Now(), - UpdatedAt: database.Now(), - Scope: scope, - ScopeID: scopeID, - SourceScheme: createRequest.SourceScheme, - SourceValue: createRequest.SourceValue, - DestinationScheme: createRequest.DestinationScheme, - DestinationValue: createRequest.DestinationValue, - }) - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("insert parameter value: %s", err), - }) - return - } - - render.Status(r, http.StatusCreated) - render.JSON(rw, r, parameterValue) -} - -// Abstracts returning parameters for a scope into a standardized -// request/response format. Callers are responsible for checking -// requester permissions. -func parametersForScope(rw http.ResponseWriter, r *http.Request, db database.Store, req database.GetParameterValuesByScopeParams) { - parameterValues, err := db.GetParameterValuesByScope(r.Context(), req) - if errors.Is(err, sql.ErrNoRows) { - err = nil - parameterValues = []database.ParameterValue{} - } - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("get parameter values: %s", err), - }) - return - } - - apiParameterValues := make([]ParameterValue, 0, len(parameterValues)) - for _, parameterValue := range parameterValues { - apiParameterValues = append(apiParameterValues, convertParameterValue(parameterValue)) - } - - render.Status(r, http.StatusOK) - render.JSON(rw, r, apiParameterValues) -} - -func convertParameterValue(parameterValue database.ParameterValue) ParameterValue { - return ParameterValue{ - ID: parameterValue.ID, - Name: parameterValue.Name, - CreatedAt: parameterValue.CreatedAt, - UpdatedAt: parameterValue.UpdatedAt, - Scope: parameterValue.Scope, - ScopeID: parameterValue.ScopeID, - SourceScheme: parameterValue.SourceScheme, - DestinationScheme: parameterValue.DestinationScheme, - DestinationValue: parameterValue.DestinationValue, - } -} diff --git a/coderd/projectimport.go b/coderd/projectimport.go new file mode 100644 index 0000000000000..d3032f9b1eee5 --- /dev/null +++ b/coderd/projectimport.go @@ -0,0 +1,185 @@ +package coderd + +import ( + "database/sql" + "errors" + "fmt" + "net/http" + + "github.com/go-chi/render" + "github.com/google/uuid" + + "github.com/coder/coder/coderd/parameter" + "github.com/coder/coder/database" + "github.com/coder/coder/httpapi" + "github.com/coder/coder/httpmw" +) + +// ParameterSchema represents a parameter parsed from project version source. 
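+// Defining it over the database row keeps the API response in sync with the
+// stored fields (including their JSON tags) without copying the struct.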
+type ParameterSchema database.ParameterSchema + +// ComputedParameterValue represents a computed parameter value. +type ComputedParameterValue parameter.ComputedValue + +// ProjectImportJobResource is a resource created by a project import job. +type ProjectImportJobResource database.ProjectImportJobResource + +// CreateProjectImportJobRequest provides options to create a project import job. +type CreateProjectImportJobRequest struct { + StorageMethod database.ProvisionerStorageMethod `json:"storage_method" validate:"oneof=file,required"` + StorageSource string `json:"storage_source" validate:"required"` + Provisioner database.ProvisionerType `json:"provisioner" validate:"oneof=terraform echo,required"` + // ParameterValues allows for additional parameters to be provided + // during the dry-run provision stage. + ParameterValues []CreateParameterValueRequest `json:"parameter_values"` +} + +// Create a new project import job! +func (api *api) postProjectImportByOrganization(rw http.ResponseWriter, r *http.Request) { + apiKey := httpmw.APIKey(r) + organization := httpmw.OrganizationParam(r) + var req CreateProjectImportJobRequest + if !httpapi.Read(rw, r, &req) { + return + } + file, err := api.Database.GetFileByHash(r.Context(), req.StorageSource) + if errors.Is(err, sql.ErrNoRows) { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: "file not found", + }) + return + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get file: %s", err), + }) + return + } + + jobID := uuid.New() + for _, parameterValue := range req.ParameterValues { + _, err = api.Database.InsertParameterValue(r.Context(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameterValue.Name, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + Scope: database.ParameterScopeImportJob, + ScopeID: jobID.String(), + SourceScheme: parameterValue.SourceScheme, + SourceValue: parameterValue.SourceValue, + DestinationScheme: parameterValue.DestinationScheme, + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("insert parameter value: %s", err), + }) + return + } + } + + job, err := api.Database.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{ + ID: jobID, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + OrganizationID: organization.ID, + InitiatorID: apiKey.UserID, + Provisioner: req.Provisioner, + StorageMethod: database.ProvisionerStorageMethodFile, + StorageSource: file.Hash, + Type: database.ProvisionerJobTypeProjectVersionImport, + Input: []byte{'{', '}'}, + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("insert provisioner job: %s", err), + }) + return + } + + render.Status(r, http.StatusCreated) + render.JSON(rw, r, convertProvisionerJob(job)) +} + +// Returns imported parameter schemas from a completed job! 
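+// Until the job reports a completed status the handler responds with
+// 412 Precondition Failed (exercised by the ListRunning tests).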
+func (api *api) projectImportJobSchemasByID(rw http.ResponseWriter, r *http.Request) { + job := httpmw.ProvisionerJobParam(r) + if !convertProvisionerJob(job).Status.Completed() { + httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{ + Message: "Job hasn't completed!", + }) + return + } + + schemas, err := api.Database.GetParameterSchemasByJobID(r.Context(), job.ID) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("list parameter schemas: %s", err), + }) + return + } + if schemas == nil { + schemas = []database.ParameterSchema{} + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, schemas) +} + +// Returns computed parameters for an import job by ID. +func (api *api) projectImportJobParametersByID(rw http.ResponseWriter, r *http.Request) { + apiKey := httpmw.APIKey(r) + job := httpmw.ProvisionerJobParam(r) + if !convertProvisionerJob(job).Status.Completed() { + httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{ + Message: "Job hasn't completed!", + }) + return + } + values, err := parameter.Compute(r.Context(), api.Database, parameter.ComputeScope{ + ProjectImportJobID: job.ID, + OrganizationID: job.OrganizationID, + UserID: apiKey.UserID, + }, ¶meter.ComputeOptions{ + // We *never* want to send the client secret parameter values. + HideRedisplayValues: true, + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("compute values: %s", err), + }) + return + } + if values == nil { + values = []parameter.ComputedValue{} + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, values) +} + +// Returns resources for an import job by ID. +func (api *api) projectImportJobResourcesByID(rw http.ResponseWriter, r *http.Request) { + job := httpmw.ProvisionerJobParam(r) + if !convertProvisionerJob(job).Status.Completed() { + httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{ + Message: "Job hasn't completed!", + }) + return + } + resources, err := api.Database.GetProjectImportJobResourcesByJobID(r.Context(), job.ID) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get project import job resources: %s", err), + }) + return + } + if resources == nil { + resources = []database.ProjectImportJobResource{} + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, resources) +} diff --git a/coderd/projectimport_test.go b/coderd/projectimport_test.go new file mode 100644 index 0000000000000..06140190f51d5 --- /dev/null +++ b/coderd/projectimport_test.go @@ -0,0 +1,162 @@ +package coderd_test + +import ( + "context" + "net/http" + "testing" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/database" + "github.com/coder/coder/provisioner/echo" + "github.com/coder/coder/provisionersdk/proto" + "github.com/stretchr/testify/require" +) + +func TestPostProjectImportByOrganization(t *testing.T) { + t.Parallel() + t.Run("FileNotFound", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _, err := client.CreateProjectImportJob(context.Background(), user.Organization, coderd.CreateProjectImportJobRequest{ + StorageMethod: database.ProvisionerStorageMethodFile, + StorageSource: "bananas", + Provisioner: 
database.ProvisionerTypeEcho, + }) + require.Error(t, err) + }) + t.Run("Create", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + }) +} + +func TestProjectImportJobSchemasByID(t *testing.T) { + t.Parallel() + t.Run("ListRunning", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + _, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode()) + }) + t.Run("List", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "example", + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + schemas, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.NotNil(t, schemas) + require.Len(t, schemas, 1) + }) +} + +func TestProjectImportJobParametersByID(t *testing.T) { + t.Parallel() + t.Run("ListRunning", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + _, err := client.ProjectImportJobSchemas(context.Background(), user.Organization, job.ID) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode()) + }) + t.Run("List", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "example", + RedisplayValue: true, + DefaultSource: &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: "hello", + }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.NotNil(t, params) + require.Len(t, params, 1) + require.Equal(t, "hello", params[0].SourceValue) + }) +} + +func TestProjectImportJobResourcesByID(t *testing.T) { + t.Parallel() + t.Run("ListRunning", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + job := coderdtest.CreateProjectImportJob(t, client, 
user.Organization, nil) + _, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode()) + }) + t.Run("List", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{ + Resources: []*proto.Resource{{ + Name: "some", + Type: "example", + }}, + }, + }, + }}, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + resources, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.NotNil(t, resources) + require.Len(t, resources, 2) + require.Equal(t, "some", resources[0].Name) + require.Equal(t, "example", resources[0].Type) + }) +} diff --git a/coderd/projectparameter/projectparameter.go b/coderd/projectparameter/projectparameter.go deleted file mode 100644 index 7288c8086c9ca..0000000000000 --- a/coderd/projectparameter/projectparameter.go +++ /dev/null @@ -1,233 +0,0 @@ -package projectparameter - -import ( - "context" - "database/sql" - "errors" - "fmt" - - "github.com/google/uuid" - "golang.org/x/xerrors" - - "github.com/coder/coder/database" - "github.com/coder/coder/provisionersdk/proto" -) - -// Scope targets identifiers to pull parameters from. -type Scope struct { - ImportJobID uuid.UUID - OrganizationID string - ProjectID uuid.NullUUID - UserID sql.NullString - WorkspaceID uuid.NullUUID -} - -// Value represents a computed parameter. -type Value struct { - Proto *proto.ParameterValue - // DefaultValue is whether a default value for the scope - // was consumed. This can only be true for projects. - DefaultValue bool - Scope database.ParameterScope - ScopeID string -} - -// Compute accepts a scope in which parameter values are sourced. -// These sources are iterated in a hierarchical fashion to determine -// the runtime parameter values for a project. -func Compute(ctx context.Context, db database.Store, scope Scope, additional ...database.ParameterValue) ([]Value, error) { - compute := &compute{ - db: db, - computedParameterByName: map[string]Value{}, - parameterSchemasByName: map[string]database.ParameterSchema{}, - } - - // All parameters for the import job ID! - parameterSchemas, err := db.GetParameterSchemasByJobID(ctx, scope.ImportJobID) - if errors.Is(err, sql.ErrNoRows) { - err = nil - } - if err != nil { - return nil, xerrors.Errorf("get project parameters: %w", err) - } - for _, projectVersionParameter := range parameterSchemas { - compute.parameterSchemasByName[projectVersionParameter.Name] = projectVersionParameter - } - - // Organization parameters come first! - err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ - Scope: database.ParameterScopeOrganization, - ScopeID: scope.OrganizationID, - }) - if err != nil { - return nil, err - } - - // Default project parameter values come second! 
- for _, projectVersionParameter := range parameterSchemas { - if !projectVersionParameter.DefaultSourceValue.Valid { - continue - } - if !projectVersionParameter.DefaultDestinationValue.Valid { - continue - } - - destinationScheme, err := convertDestinationScheme(projectVersionParameter.DefaultDestinationScheme) - if err != nil { - return nil, xerrors.Errorf("convert default destination scheme for project version parameter %q: %w", projectVersionParameter.Name, err) - } - - switch projectVersionParameter.DefaultSourceScheme { - case database.ParameterSourceSchemeData: - compute.computedParameterByName[projectVersionParameter.Name] = Value{ - Proto: &proto.ParameterValue{ - DestinationScheme: destinationScheme, - Name: projectVersionParameter.DefaultDestinationValue.String, - Value: projectVersionParameter.DefaultSourceValue.String, - }, - DefaultValue: true, - Scope: database.ParameterScopeProject, - ScopeID: scope.ProjectID.UUID.String(), - } - default: - return nil, xerrors.Errorf("unsupported source scheme for project version parameter %q: %q", projectVersionParameter.Name, string(projectVersionParameter.DefaultSourceScheme)) - } - } - - if scope.ProjectID.Valid { - // Project parameters come third! - err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ - Scope: database.ParameterScopeProject, - ScopeID: scope.ProjectID.UUID.String(), - }) - if err != nil { - return nil, err - } - } - - if scope.UserID.Valid { - // User parameters come fourth! - err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ - Scope: database.ParameterScopeUser, - ScopeID: scope.UserID.String, - }) - if err != nil { - return nil, err - } - } - - if scope.WorkspaceID.Valid { - // Workspace parameters come last! - err = compute.injectScope(ctx, database.GetParameterValuesByScopeParams{ - Scope: database.ParameterScopeWorkspace, - ScopeID: scope.WorkspaceID.UUID.String(), - }) - if err != nil { - return nil, err - } - } - - for _, parameterValue := range additional { - err = compute.injectSingle(parameterValue) - if err != nil { - return nil, xerrors.Errorf("inject %q: %w", parameterValue.Name, err) - } - } - - for _, projectVersionParameter := range compute.parameterSchemasByName { - if _, ok := compute.computedParameterByName[projectVersionParameter.Name]; ok { - continue - } - return nil, NoValueError{ - ParameterID: projectVersionParameter.ID, - ParameterName: projectVersionParameter.Name, - } - } - - values := make([]Value, 0, len(compute.computedParameterByName)) - for _, value := range compute.computedParameterByName { - values = append(values, value) - } - return values, nil -} - -type compute struct { - db database.Store - computedParameterByName map[string]Value - parameterSchemasByName map[string]database.ParameterSchema -} - -// Validates and computes the value for parameters; setting the value on "parameterByName". 
-func (c *compute) injectScope(ctx context.Context, scopeParams database.GetParameterValuesByScopeParams) error { - scopedParameters, err := c.db.GetParameterValuesByScope(ctx, scopeParams) - if errors.Is(err, sql.ErrNoRows) { - err = nil - } - if err != nil { - return xerrors.Errorf("get %s parameters: %w", scopeParams.Scope, err) - } - - for _, scopedParameter := range scopedParameters { - err = c.injectSingle(scopedParameter) - if err != nil { - return xerrors.Errorf("inject single %q: %w", scopedParameter.Name, err) - } - } - return nil -} - -func (c *compute) injectSingle(scopedParameter database.ParameterValue) error { - parameterSchema, hasParameterSchema := c.parameterSchemasByName[scopedParameter.Name] - if hasParameterSchema { - // Don't inject parameters that aren't defined by the project. - _, hasExistingParameter := c.computedParameterByName[scopedParameter.Name] - if hasExistingParameter { - // If a parameter already exists, check if this variable can override it. - // Injection hierarchy is the responsibility of the caller. This check ensures - // project parameters cannot be overridden if already set. - if !parameterSchema.AllowOverrideSource && scopedParameter.Scope != database.ParameterScopeProject { - return nil - } - } - } - - destinationScheme, err := convertDestinationScheme(scopedParameter.DestinationScheme) - if err != nil { - return xerrors.Errorf("convert destination scheme: %w", err) - } - - switch scopedParameter.SourceScheme { - case database.ParameterSourceSchemeData: - c.computedParameterByName[scopedParameter.Name] = Value{ - Proto: &proto.ParameterValue{ - DestinationScheme: destinationScheme, - Name: scopedParameter.SourceValue, - Value: scopedParameter.DestinationValue, - }, - } - default: - return xerrors.Errorf("unsupported source scheme: %q", string(parameterSchema.DefaultSourceScheme)) - } - return nil -} - -// Converts the database destination scheme to the protobuf version. 
-func convertDestinationScheme(scheme database.ParameterDestinationScheme) (proto.ParameterDestination_Scheme, error) { - switch scheme { - case database.ParameterDestinationSchemeEnvironmentVariable: - return proto.ParameterDestination_ENVIRONMENT_VARIABLE, nil - case database.ParameterDestinationSchemeProvisionerVariable: - return proto.ParameterDestination_PROVISIONER_VARIABLE, nil - default: - return 0, xerrors.Errorf("unsupported destination scheme: %q", scheme) - } -} - -type NoValueError struct { - ParameterID uuid.UUID - ParameterName string -} - -func (e NoValueError) Error() string { - return fmt.Sprintf("no value for parameter %q found", e.ParameterName) -} diff --git a/coderd/projectparameter/projectparameter_test.go b/coderd/projectparameter/projectparameter_test.go deleted file mode 100644 index 6fb04701c606c..0000000000000 --- a/coderd/projectparameter/projectparameter_test.go +++ /dev/null @@ -1,248 +0,0 @@ -package projectparameter_test - -import ( - "context" - "database/sql" - "testing" - - "github.com/google/uuid" - "github.com/stretchr/testify/require" - - "github.com/coder/coder/coderd/projectparameter" - "github.com/coder/coder/cryptorand" - "github.com/coder/coder/database" - "github.com/coder/coder/database/databasefake" - "github.com/coder/coder/provisionersdk/proto" -) - -func TestCompute(t *testing.T) { - t.Parallel() - generateScope := func() projectparameter.Scope { - return projectparameter.Scope{ - ImportJobID: uuid.New(), - OrganizationID: uuid.NewString(), - ProjectID: uuid.NullUUID{ - UUID: uuid.New(), - Valid: true, - }, - WorkspaceID: uuid.NullUUID{ - UUID: uuid.New(), - Valid: true, - }, - UserID: sql.NullString{ - String: uuid.NewString(), - Valid: true, - }, - } - } - type projectParameterOptions struct { - AllowOverrideSource bool - AllowOverrideDestination bool - DefaultDestinationScheme database.ParameterDestinationScheme - ImportJobID uuid.UUID - } - generateProjectParameter := func(t *testing.T, db database.Store, opts projectParameterOptions) database.ParameterSchema { - if opts.DefaultDestinationScheme == "" { - opts.DefaultDestinationScheme = database.ParameterDestinationSchemeEnvironmentVariable - } - name, err := cryptorand.String(8) - require.NoError(t, err) - sourceValue, err := cryptorand.String(8) - require.NoError(t, err) - destinationValue, err := cryptorand.String(8) - require.NoError(t, err) - param, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{ - ID: uuid.New(), - Name: name, - JobID: opts.ImportJobID, - DefaultSourceScheme: database.ParameterSourceSchemeData, - DefaultSourceValue: sql.NullString{ - String: sourceValue, - Valid: true, - }, - DefaultDestinationValue: sql.NullString{ - String: destinationValue, - Valid: true, - }, - AllowOverrideSource: opts.AllowOverrideSource, - AllowOverrideDestination: opts.AllowOverrideDestination, - DefaultDestinationScheme: opts.DefaultDestinationScheme, - }) - require.NoError(t, err) - return param - } - - t.Run("NoValue", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter, err := db.InsertParameterSchema(context.Background(), database.InsertParameterSchemaParams{ - ID: uuid.New(), - JobID: scope.ImportJobID, - Name: "hey", - }) - require.NoError(t, err) - - _, err = projectparameter.Compute(context.Background(), db, scope) - var noValueErr projectparameter.NoValueError - require.ErrorAs(t, err, &noValueErr) - require.Equal(t, parameter.ID.String(), noValueErr.ParameterID.String()) - 
require.Equal(t, parameter.Name, noValueErr.ParameterName) - }) - - t.Run("UseDefaultProjectValue", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - ImportJobID: scope.ImportJobID, - DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable, - }) - values, err := projectparameter.Compute(context.Background(), db, scope) - require.NoError(t, err) - require.Len(t, values, 1) - value := values[0] - require.True(t, value.DefaultValue) - require.Equal(t, database.ParameterScopeProject, value.Scope) - require.Equal(t, scope.ProjectID.UUID.String(), value.ScopeID) - require.Equal(t, value.Proto.Name, parameter.DefaultDestinationValue.String) - require.Equal(t, value.Proto.DestinationScheme, proto.ParameterDestination_PROVISIONER_VARIABLE) - require.Equal(t, value.Proto.Value, parameter.DefaultSourceValue.String) - }) - - t.Run("OverrideOrganizationWithProjectDefault", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - ImportJobID: scope.ImportJobID, - }) - _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: parameter.Name, - Scope: database.ParameterScopeOrganization, - ScopeID: scope.OrganizationID, - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "organizationvalue", - }) - require.NoError(t, err) - - values, err := projectparameter.Compute(context.Background(), db, scope) - require.NoError(t, err) - require.Len(t, values, 1) - require.Equal(t, true, values[0].DefaultValue) - require.Equal(t, parameter.DefaultSourceValue.String, values[0].Proto.Value) - }) - - t.Run("ProjectOverridesProjectDefault", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - ImportJobID: scope.ImportJobID, - }) - value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: parameter.Name, - Scope: database.ParameterScopeProject, - ScopeID: scope.ProjectID.UUID.String(), - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "projectvalue", - }) - require.NoError(t, err) - - values, err := projectparameter.Compute(context.Background(), db, scope) - require.NoError(t, err) - require.Len(t, values, 1) - require.Equal(t, false, values[0].DefaultValue) - require.Equal(t, value.DestinationValue, values[0].Proto.Value) - }) - - t.Run("WorkspaceCannotOverwriteProjectDefault", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - ImportJobID: scope.ImportJobID, - }) - _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: parameter.Name, - Scope: database.ParameterScopeWorkspace, - ScopeID: scope.WorkspaceID.UUID.String(), - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "projectvalue", - }) - require.NoError(t, err) - - 
values, err := projectparameter.Compute(context.Background(), db, scope) - require.NoError(t, err) - require.Len(t, values, 1) - require.Equal(t, true, values[0].DefaultValue) - }) - - t.Run("WorkspaceOverwriteProjectDefault", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - AllowOverrideSource: true, - ImportJobID: scope.ImportJobID, - }) - _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: parameter.Name, - Scope: database.ParameterScopeWorkspace, - ScopeID: scope.WorkspaceID.UUID.String(), - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "projectvalue", - }) - require.NoError(t, err) - - values, err := projectparameter.Compute(context.Background(), db, scope) - require.NoError(t, err) - require.Len(t, values, 1) - require.Equal(t, false, values[0].DefaultValue) - }) - - t.Run("AdditionalOverwriteWorkspace", func(t *testing.T) { - t.Parallel() - db := databasefake.New() - scope := generateScope() - parameter := generateProjectParameter(t, db, projectParameterOptions{ - AllowOverrideSource: true, - ImportJobID: scope.ImportJobID, - }) - _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ - ID: uuid.New(), - Name: parameter.Name, - Scope: database.ParameterScopeWorkspace, - ScopeID: scope.WorkspaceID.UUID.String(), - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "projectvalue", - }) - require.NoError(t, err) - - values, err := projectparameter.Compute(context.Background(), db, scope, database.ParameterValue{ - Name: parameter.Name, - Scope: database.ParameterScopeUser, - SourceScheme: database.ParameterSourceSchemeData, - SourceValue: "nop", - DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "testing", - }) - require.NoError(t, err) - require.Len(t, values, 1) - require.Equal(t, "testing", values[0].Proto.Value) - }) -} diff --git a/coderd/projects.go b/coderd/projects.go index f50c192e6a006..90c6ce243888b 100644 --- a/coderd/projects.go +++ b/coderd/projects.go @@ -16,6 +16,17 @@ import ( "github.com/coder/coder/httpmw" ) +// ParameterValue represents a set value for the scope. +type ParameterValue database.ParameterValue + +// CreateParameterValueRequest is used to create a new parameter value for a scope. +type CreateParameterValueRequest struct { + Name string `json:"name" validate:"required"` + SourceValue string `json:"source_value" validate:"required"` + SourceScheme database.ParameterSourceScheme `json:"source_scheme" validate:"oneof=data,required"` + DestinationScheme database.ParameterDestinationScheme `json:"destination_scheme" validate:"oneof=environment_variable provisioner_variable,required"` +} + // Project is the JSON representation of a Coder project. // This type matches the database object for now, but is // abstracted for ease of change later on. @@ -177,16 +188,60 @@ func (*api) projectByOrganization(rw http.ResponseWriter, r *http.Request) { // This should validate the calling user has permissions! 
func (api *api) postParametersByProject(rw http.ResponseWriter, r *http.Request) { project := httpmw.ProjectParam(r) + var createRequest CreateParameterValueRequest + if !httpapi.Read(rw, r, &createRequest) { + return + } + parameterValue, err := api.Database.InsertParameterValue(r.Context(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: createRequest.Name, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + Scope: database.ParameterScopeProject, + ScopeID: project.ID.String(), + SourceScheme: createRequest.SourceScheme, + SourceValue: createRequest.SourceValue, + DestinationScheme: createRequest.DestinationScheme, + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("insert parameter value: %s", err), + }) + return + } - postParameterValueForScope(rw, r, api.Database, database.ParameterScopeProject, project.ID.String()) + render.Status(r, http.StatusCreated) + render.JSON(rw, r, parameterValue) } // Lists parameters for a project. func (api *api) parametersByProject(rw http.ResponseWriter, r *http.Request) { project := httpmw.ProjectParam(r) - - parametersForScope(rw, r, api.Database, database.GetParameterValuesByScopeParams{ + parameterValues, err := api.Database.GetParameterValuesByScope(r.Context(), database.GetParameterValuesByScopeParams{ Scope: database.ParameterScopeProject, ScopeID: project.ID.String(), }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + parameterValues = []database.ParameterValue{} + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get parameter values: %s", err), + }) + return + } + + apiParameterValues := make([]ParameterValue, 0, len(parameterValues)) + for _, parameterValue := range parameterValues { + apiParameterValues = append(apiParameterValues, convertParameterValue(parameterValue)) + } + + render.Status(r, http.StatusOK) + render.JSON(rw, r, apiParameterValues) +} + +func convertParameterValue(parameterValue database.ParameterValue) ParameterValue { + parameterValue.SourceValue = "" + return ParameterValue(parameterValue) } diff --git a/coderd/projects_test.go b/coderd/projects_test.go index ee11e2b744ad6..9ea0cbf87f443 100644 --- a/coderd/projects_test.go +++ b/coderd/projects_test.go @@ -30,7 +30,7 @@ func TestProjects(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) _ = coderdtest.CreateProject(t, client, user.Organization, job.ID) projects, err := client.Projects(context.Background(), "") require.NoError(t, err) @@ -54,7 +54,7 @@ func TestProjectsByOrganization(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) _ = coderdtest.CreateProject(t, client, user.Organization, job.ID) projects, err := client.Projects(context.Background(), "") require.NoError(t, err) @@ -68,7 +68,7 @@ func TestPostProjectsByOrganization(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, 
user.Organization, nil) _ = coderdtest.CreateProject(t, client, user.Organization, job.ID) }) @@ -76,7 +76,7 @@ func TestPostProjectsByOrganization(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProject(context.Background(), user.Organization, coderd.CreateProjectRequest{ Name: project.Name, @@ -94,7 +94,7 @@ func TestProjectByOrganization(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.Project(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -107,14 +107,13 @@ func TestPostParametersByProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProjectParameter(context.Background(), user.Organization, project.Name, coderd.CreateParameterValueRequest{ Name: "somename", SourceValue: "tomato", SourceScheme: database.ParameterSourceSchemeData, DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "moo", }) require.NoError(t, err) }) @@ -126,7 +125,7 @@ func TestParametersByProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) params, err := client.ProjectParameters(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -137,14 +136,13 @@ func TestParametersByProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProjectParameter(context.Background(), user.Organization, project.Name, coderd.CreateParameterValueRequest{ Name: "example", SourceValue: "source-value", SourceScheme: database.ParameterSourceSchemeData, DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "destination-value", }) require.NoError(t, err) params, err := client.ProjectParameters(context.Background(), user.Organization, project.Name) diff --git a/coderd/projectversion.go b/coderd/projectversion.go index 2b603e2e10665..c1b3ecdd29ace 100644 --- a/coderd/projectversion.go +++ b/coderd/projectversion.go @@ -26,26 +26,6 @@ type ProjectVersion struct { ImportJobID uuid.UUID `json:"import_job_id"` } -// ProjectVersionParameter represents a 
parameter parsed from project version source on creation. -type ProjectVersionParameter struct { - ID uuid.UUID `json:"id"` - CreatedAt time.Time `json:"created_at"` - Name string `json:"name"` - Description string `json:"description,omitempty"` - DefaultSourceScheme database.ParameterSourceScheme `json:"default_source_scheme,omitempty"` - DefaultSourceValue string `json:"default_source_value,omitempty"` - AllowOverrideSource bool `json:"allow_override_source"` - DefaultDestinationScheme database.ParameterDestinationScheme `json:"default_destination_scheme,omitempty"` - DefaultDestinationValue string `json:"default_destination_value,omitempty"` - AllowOverrideDestination bool `json:"allow_override_destination"` - DefaultRefresh string `json:"default_refresh"` - RedisplayValue bool `json:"redisplay_value"` - ValidationError string `json:"validation_error,omitempty"` - ValidationCondition string `json:"validation_condition,omitempty"` - ValidationTypeSystem database.ParameterTypeSystem `json:"validation_type_system,omitempty"` - ValidationValueType string `json:"validation_value_type,omitempty"` -} - // CreateProjectVersionRequest enables callers to create a new Project Version. type CreateProjectVersionRequest struct { ImportJobID uuid.UUID `json:"import_job_id" validate:"required"` @@ -121,50 +101,6 @@ func (api *api) postProjectVersionByOrganization(rw http.ResponseWriter, r *http render.JSON(rw, r, convertProjectVersion(projectVersion)) } -func (api *api) projectVersionParametersByOrganizationAndName(rw http.ResponseWriter, r *http.Request) { - projectVersion := httpmw.ProjectVersionParam(r) - job, err := api.Database.GetProvisionerJobByID(r.Context(), projectVersion.ImportJobID) - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("get provisioner job: %s", err), - }) - return - } - apiJob := convertProvisionerJob(job) - if !apiJob.Status.Completed() { - httpapi.Write(rw, http.StatusPreconditionRequired, httpapi.Response{ - Message: fmt.Sprintf("import job hasn't completed: %s", apiJob.Status), - }) - return - } - if apiJob.Status != ProvisionerJobStatusSucceeded { - httpapi.Write(rw, http.StatusPreconditionFailed, httpapi.Response{ - Message: "import job wasn't successful. 
no parameters were parsed", - }) - return - } - - parameters, err := api.Database.GetParameterSchemasByJobID(r.Context(), projectVersion.ImportJobID) - if errors.Is(err, sql.ErrNoRows) { - err = nil - parameters = []database.ParameterSchema{} - } - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("get project parameters: %s", err), - }) - return - } - - apiParameters := make([]ProjectVersionParameter, 0, len(parameters)) - for _, parameter := range parameters { - apiParameters = append(apiParameters, convertProjectParameter(parameter)) - } - - render.Status(r, http.StatusOK) - render.JSON(rw, r, apiParameters) -} - func convertProjectVersion(version database.ProjectVersion) ProjectVersion { return ProjectVersion{ ID: version.ID, @@ -175,24 +111,3 @@ func convertProjectVersion(version database.ProjectVersion) ProjectVersion { ImportJobID: version.ImportJobID, } } - -func convertProjectParameter(parameter database.ParameterSchema) ProjectVersionParameter { - return ProjectVersionParameter{ - ID: parameter.ID, - CreatedAt: parameter.CreatedAt, - Name: parameter.Name, - Description: parameter.Description, - DefaultSourceScheme: parameter.DefaultSourceScheme, - DefaultSourceValue: parameter.DefaultSourceValue.String, - AllowOverrideSource: parameter.AllowOverrideSource, - DefaultDestinationScheme: parameter.DefaultDestinationScheme, - DefaultDestinationValue: parameter.DefaultDestinationValue.String, - AllowOverrideDestination: parameter.AllowOverrideDestination, - DefaultRefresh: parameter.DefaultRefresh, - RedisplayValue: parameter.RedisplayValue, - ValidationError: parameter.ValidationError, - ValidationCondition: parameter.ValidationCondition, - ValidationTypeSystem: parameter.ValidationTypeSystem, - ValidationValueType: parameter.ValidationValueType, - } -} diff --git a/coderd/projectversion_test.go b/coderd/projectversion_test.go index 467574cfa74e9..d288b53552086 100644 --- a/coderd/projectversion_test.go +++ b/coderd/projectversion_test.go @@ -2,16 +2,12 @@ package coderd_test import ( "context" - "net/http" "testing" "github.com/stretchr/testify/require" "github.com/coder/coder/coderd" "github.com/coder/coder/coderd/coderdtest" - "github.com/coder/coder/codersdk" - "github.com/coder/coder/provisioner/echo" - "github.com/coder/coder/provisionersdk/proto" ) func TestProjectVersionsByOrganization(t *testing.T) { @@ -20,7 +16,7 @@ func TestProjectVersionsByOrganization(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) versions, err := client.ProjectVersions(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -35,7 +31,7 @@ func TestProjectVersionByOrganizationAndName(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.ProjectVersion(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) require.NoError(t, err) @@ -48,7 +44,7 @@ func TestPostProjectVersionByOrganization(t 
*testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProjectVersion(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ ImportJobID: job.ID, @@ -56,57 +52,3 @@ func TestPostProjectVersionByOrganization(t *testing.T) { require.NoError(t, err) }) } - -func TestProjectVersionParametersByOrganizationAndName(t *testing.T) { - t.Parallel() - t.Run("NotImported", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - _, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) - var apiErr *codersdk.Error - require.ErrorAs(t, err, &apiErr) - require.Equal(t, http.StatusPreconditionRequired, apiErr.StatusCode()) - }) - - t.Run("FailedImport", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - _ = coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Provision: []*proto.Provision_Response{{}}, - }) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - _, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) - var apiErr *codersdk.Error - require.ErrorAs(t, err, &apiErr) - require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode()) - }) - t.Run("List", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - _ = coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Parse: []*proto.Parse_Response{{ - Type: &proto.Parse_Response_Complete{ - Complete: &proto.Parse_Complete{ - ParameterSchemas: []*proto.ParameterSchema{{ - Name: "example", - }}, - }, - }, - }}, - Provision: echo.ProvisionComplete, - }) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - params, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) - require.NoError(t, err) - require.Len(t, params, 1) - }) -} diff --git a/coderd/provisionerdaemons.go b/coderd/provisionerdaemons.go index 0d51966f62a7c..a75730dc0a876 100644 --- a/coderd/provisionerdaemons.go +++ b/coderd/provisionerdaemons.go @@ -22,7 +22,7 @@ import ( "cdr.dev/slog" - "github.com/coder/coder/coderd/projectparameter" + "github.com/coder/coder/coderd/parameter" "github.com/coder/coder/database" "github.com/coder/coder/httpapi" "github.com/coder/coder/provisionerd/proto" @@ -109,16 +109,6 @@ type workspaceProvisionJob struct { DryRun bool `json:"dry_run"` } -// The input for a "project_import" job. 
-type projectVersionImportJob struct { - OrganizationID string `json:"organization_id"` - ProjectID uuid.UUID `json:"project_id"` - - AdditionalParameters []database.ParameterValue `json:"parameters"` - SkipParameterSchemas bool `json:"skip_parameter_schemas"` - SkipResources bool `json:"skip_resources"` -} - // Implementation of the provisioner daemon protobuf server. type provisionerdServer struct { ID uuid.UUID @@ -205,36 +195,38 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty if err != nil { return nil, failJob(fmt.Sprintf("get project: %s", err)) } - organization, err := server.Database.GetOrganizationByID(ctx, project.OrganizationID) - if err != nil { - return nil, failJob(fmt.Sprintf("get organization: %s", err)) - } // Compute parameters for the workspace to consume. - parameters, err := projectparameter.Compute(ctx, server.Database, projectparameter.Scope{ - ImportJobID: projectVersion.ImportJobID, - OrganizationID: organization.ID, + parameters, err := parameter.Compute(ctx, server.Database, parameter.ComputeScope{ + ProjectImportJobID: projectVersion.ImportJobID, + OrganizationID: job.OrganizationID, ProjectID: uuid.NullUUID{ UUID: project.ID, Valid: true, }, - UserID: sql.NullString{ - String: user.ID, - Valid: true, - }, + UserID: user.ID, WorkspaceID: uuid.NullUUID{ UUID: workspace.ID, Valid: true, }, - }) + }, nil) if err != nil { return nil, failJob(fmt.Sprintf("compute parameters: %s", err)) } // Convert parameters to the protobuf type. protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters)) - for _, parameter := range parameters { - protoParameters = append(protoParameters, parameter.Proto) + for _, computedParameter := range parameters { + converted, err := convertComputedParameterValue(computedParameter) + if err != nil { + return nil, failJob(fmt.Sprintf("convert parameter: %s", err)) + } + protoParameters = append(protoParameters, converted) } + protoParameters = append(protoParameters, &sdkproto.ParameterValue{ + DestinationScheme: sdkproto.ParameterDestination_PROVISIONER_VARIABLE, + Name: parameter.CoderWorkspaceTransition, + Value: string(workspaceHistory.Transition), + }) protoJob.Type = &proto.AcquiredJob_WorkspaceProvision_{ WorkspaceProvision: &proto.AcquiredJob_WorkspaceProvision{ @@ -245,40 +237,8 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty }, } case database.ProvisionerJobTypeProjectVersionImport: - var input projectVersionImportJob - err = json.Unmarshal(job.Input, &input) - if err != nil { - return nil, failJob(fmt.Sprintf("unmarshal job input %q: %s", job.Input, err)) - } - - // Compute parameters for the workspace to consume. - parameters, err := projectparameter.Compute(ctx, server.Database, projectparameter.Scope{ - ImportJobID: job.ID, - OrganizationID: input.OrganizationID, - ProjectID: uuid.NullUUID{ - UUID: input.ProjectID, - Valid: input.ProjectID.String() != uuid.Nil.String(), - }, - UserID: sql.NullString{ - String: user.ID, - Valid: true, - }, - }, input.AdditionalParameters...) - if err != nil { - return nil, failJob(fmt.Sprintf("compute parameters: %s", err)) - } - // Convert parameters to the protobuf type. 
- protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters)) - for _, parameter := range parameters { - protoParameters = append(protoParameters, parameter.Proto) - } - protoJob.Type = &proto.AcquiredJob_ProjectImport_{ - ProjectImport: &proto.AcquiredJob_ProjectImport{ - ParameterValues: protoParameters, - SkipParameterSchemas: input.SkipParameterSchemas, - SkipResources: input.SkipResources, - }, + ProjectImport: &proto.AcquiredJob_ProjectImport{}, } } switch job.StorageMethod { @@ -295,45 +255,41 @@ func (server *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty return protoJob, err } -func (server *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - for { - update, err := stream.Recv() - if err != nil { - return err - } - parsedID, err := uuid.Parse(update.JobId) - if err != nil { - return xerrors.Errorf("parse job id: %w", err) - } - job, err := server.Database.GetProvisionerJobByID(stream.Context(), parsedID) - if err != nil { - return xerrors.Errorf("get job: %w", err) - } - if !job.WorkerID.Valid { - return xerrors.New("job isn't running yet") - } - if job.WorkerID.UUID.String() != server.ID.String() { - return xerrors.New("you don't own this job") - } +func (server *provisionerdServer) UpdateJob(ctx context.Context, request *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + parsedID, err := uuid.Parse(request.JobId) + if err != nil { + return nil, xerrors.Errorf("parse job id: %w", err) + } + job, err := server.Database.GetProvisionerJobByID(ctx, parsedID) + if err != nil { + return nil, xerrors.Errorf("get job: %w", err) + } + if !job.WorkerID.Valid { + return nil, xerrors.New("job isn't running yet") + } + if job.WorkerID.UUID.String() != server.ID.String() { + return nil, xerrors.New("you don't own this job") + } + err = server.Database.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{ + ID: parsedID, + UpdatedAt: database.Now(), + }) + if err != nil { + return nil, xerrors.Errorf("update job: %w", err) + } - err = server.Database.UpdateProvisionerJobByID(stream.Context(), database.UpdateProvisionerJobByIDParams{ - ID: parsedID, - UpdatedAt: database.Now(), - }) - if err != nil { - return xerrors.Errorf("update job: %w", err) - } + if len(request.Logs) > 0 { insertParams := database.InsertProvisionerJobLogsParams{ JobID: parsedID, } - for _, log := range update.Logs { + for _, log := range request.Logs { logLevel, err := convertLogLevel(log.Level) if err != nil { - return xerrors.Errorf("convert log level: %w", err) + return nil, xerrors.Errorf("convert log level: %w", err) } logSource, err := convertLogSource(log.Source) if err != nil { - return xerrors.Errorf("convert log source: %w", err) + return nil, xerrors.Errorf("convert log source: %w", err) } insertParams.ID = append(insertParams.ID, uuid.New()) insertParams.CreatedAt = append(insertParams.CreatedAt, time.UnixMilli(log.CreatedAt)) @@ -343,17 +299,93 @@ func (server *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_U } logs, err := server.Database.InsertProvisionerJobLogs(context.Background(), insertParams) if err != nil { - return xerrors.Errorf("insert job logs: %w", err) + return nil, xerrors.Errorf("insert job logs: %w", err) } data, err := json.Marshal(logs) if err != nil { - return xerrors.Errorf("marshal job log: %w", err) + return nil, xerrors.Errorf("marshal job log: %w", err) } err = server.Pubsub.Publish(provisionerJobLogsChannel(parsedID), data) if err != nil { - return 
xerrors.Errorf("publish job log: %w", err) + return nil, xerrors.Errorf("publish job log: %w", err) + } + } + + if len(request.ParameterSchemas) > 0 { + for _, protoParameter := range request.ParameterSchemas { + validationTypeSystem, err := convertValidationTypeSystem(protoParameter.ValidationTypeSystem) + if err != nil { + return nil, xerrors.Errorf("convert validation type system for %q: %w", protoParameter.Name, err) + } + + parameterSchema := database.InsertParameterSchemaParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + JobID: job.ID, + Name: protoParameter.Name, + Description: protoParameter.Description, + RedisplayValue: protoParameter.RedisplayValue, + ValidationError: protoParameter.ValidationError, + ValidationCondition: protoParameter.ValidationCondition, + ValidationValueType: protoParameter.ValidationValueType, + ValidationTypeSystem: validationTypeSystem, + + DefaultSourceScheme: database.ParameterSourceSchemeNone, + DefaultDestinationScheme: database.ParameterDestinationSchemeNone, + + AllowOverrideDestination: protoParameter.AllowOverrideDestination, + AllowOverrideSource: protoParameter.AllowOverrideSource, + } + + // It's possible a parameter doesn't define a default source! + if protoParameter.DefaultSource != nil { + parameterSourceScheme, err := convertParameterSourceScheme(protoParameter.DefaultSource.Scheme) + if err != nil { + return nil, xerrors.Errorf("convert parameter source scheme: %w", err) + } + parameterSchema.DefaultSourceScheme = parameterSourceScheme + parameterSchema.DefaultSourceValue = protoParameter.DefaultSource.Value + } + + // It's possible a parameter doesn't define a default destination! + if protoParameter.DefaultDestination != nil { + parameterDestinationScheme, err := convertParameterDestinationScheme(protoParameter.DefaultDestination.Scheme) + if err != nil { + return nil, xerrors.Errorf("convert parameter destination scheme: %w", err) + } + parameterSchema.DefaultDestinationScheme = parameterDestinationScheme + } + + _, err = server.Database.InsertParameterSchema(ctx, parameterSchema) + if err != nil { + return nil, xerrors.Errorf("insert parameter schema: %w", err) + } + } + + parameters, err := parameter.Compute(ctx, server.Database, parameter.ComputeScope{ + ProjectImportJobID: job.ID, + OrganizationID: job.OrganizationID, + UserID: job.InitiatorID, + }, nil) + if err != nil { + return nil, xerrors.Errorf("compute parameters: %w", err) + } + // Convert parameters to the protobuf type. + protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters)) + for _, computedParameter := range parameters { + converted, err := convertComputedParameterValue(computedParameter) + if err != nil { + return nil, xerrors.Errorf("convert parameter: %s", err) + } + protoParameters = append(protoParameters, converted) } + + return &proto.UpdateJobResponse{ + ParameterValues: protoParameters, + }, nil } + + return &proto.UpdateJobResponse{}, nil } func (server *provisionerdServer) CancelJob(ctx context.Context, cancelJob *proto.CancelledJob) (*proto.Empty, error) { @@ -400,98 +432,48 @@ func (server *provisionerdServer) CompleteJob(ctx context.Context, completed *pr if err != nil { return nil, xerrors.Errorf("get job by id: %w", err) } - // TODO: Check if the worker ID matches! - // If it doesn't, a provisioner daemon could be impersonating another job! 
+ if job.WorkerID.UUID.String() != server.ID.String() { + return nil, xerrors.Errorf("you don't have permission to update this job") + } switch jobType := completed.Type.(type) { case *proto.CompletedJob_ProjectImport_: - var input projectVersionImportJob - err = json.Unmarshal(job.Input, &input) - if err != nil { - return nil, xerrors.Errorf("unmarshal job data: %w", err) - } - - // Validate that all parameters send from the provisioner daemon - // follow the protocol. - parameterSchemas := make([]database.InsertParameterSchemaParams, 0, len(jobType.ProjectImport.ParameterSchemas)) - for _, protoParameter := range jobType.ProjectImport.ParameterSchemas { - validationTypeSystem, err := convertValidationTypeSystem(protoParameter.ValidationTypeSystem) - if err != nil { - return nil, xerrors.Errorf("convert validation type system for %q: %w", protoParameter.Name, err) - } - - parameterSchema := database.InsertParameterSchemaParams{ - ID: uuid.New(), - CreatedAt: database.Now(), - JobID: job.ID, - Name: protoParameter.Name, - Description: protoParameter.Description, - RedisplayValue: protoParameter.RedisplayValue, - ValidationError: protoParameter.ValidationError, - ValidationCondition: protoParameter.ValidationCondition, - ValidationValueType: protoParameter.ValidationValueType, - ValidationTypeSystem: validationTypeSystem, - - DefaultSourceScheme: database.ParameterSourceSchemeNone, - DefaultDestinationScheme: database.ParameterDestinationSchemeNone, - - AllowOverrideDestination: protoParameter.AllowOverrideDestination, - AllowOverrideSource: protoParameter.AllowOverrideSource, - } - - // It's possible a parameter doesn't define a default source! - if protoParameter.DefaultSource != nil { - parameterSourceScheme, err := convertParameterSourceScheme(protoParameter.DefaultSource.Scheme) - if err != nil { - return nil, xerrors.Errorf("convert parameter source scheme: %w", err) - } - parameterSchema.DefaultSourceScheme = parameterSourceScheme - parameterSchema.DefaultSourceValue = sql.NullString{ - String: protoParameter.DefaultSource.Value, - Valid: protoParameter.DefaultSource.Value != "", - } - } - - // It's possible a parameter doesn't define a default destination! 
- if protoParameter.DefaultDestination != nil { - parameterDestinationScheme, err := convertParameterDestinationScheme(protoParameter.DefaultDestination.Scheme) + for transition, resources := range map[database.WorkspaceTransition][]*sdkproto.Resource{ + database.WorkspaceTransitionStart: jobType.ProjectImport.StartResources, + database.WorkspaceTransitionStop: jobType.ProjectImport.StopResources, + } { + for _, resource := range resources { + server.Logger.Info(ctx, "inserting project import job resource", + slog.F("job_id", job.ID.String()), + slog.F("resource_name", resource.Name), + slog.F("resource_type", resource.Type), + slog.F("transition", transition)) + _, err = server.Database.InsertProjectImportJobResource(ctx, database.InsertProjectImportJobResourceParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + JobID: jobID, + Transition: transition, + Type: resource.Type, + Name: resource.Name, + }) if err != nil { - return nil, xerrors.Errorf("convert parameter destination scheme: %w", err) - } - parameterSchema.DefaultDestinationScheme = parameterDestinationScheme - parameterSchema.DefaultDestinationValue = sql.NullString{ - String: protoParameter.DefaultDestination.Value, - Valid: protoParameter.DefaultDestination.Value != "", + return nil, xerrors.Errorf("insert resource: %w", err) } } - - parameterSchemas = append(parameterSchemas, parameterSchema) } - // This must occur in a transaction in case of failure. - err = server.Database.InTx(func(db database.Store) error { - err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{ - ID: jobID, - UpdatedAt: database.Now(), - CompletedAt: sql.NullTime{ - Time: database.Now(), - Valid: true, - }, - }) - if err != nil { - return xerrors.Errorf("update provisioner job: %w", err) - } - // This could be a bulk-insert operation to improve performance. - // See the "InsertWorkspaceHistoryLogs" query. 
- for _, parameterSchema := range parameterSchemas { - _, err = db.InsertParameterSchema(ctx, parameterSchema) - if err != nil { - return xerrors.Errorf("insert parameter schema %q: %w", parameterSchema.Name, err) - } - } - server.Logger.Debug(ctx, "marked import job as completed", slog.F("job_id", jobID)) - return nil + err = server.Database.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{ + ID: jobID, + UpdatedAt: database.Now(), + CompletedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, }) + if err != nil { + return nil, xerrors.Errorf("update provisioner job: %w", err) + } + server.Logger.Debug(ctx, "marked import job as completed", slog.F("job_id", jobID)) if err != nil { return nil, xerrors.Errorf("complete job: %w", err) } @@ -614,3 +596,21 @@ func convertLogSource(logSource proto.LogSource) (database.LogSource, error) { return database.LogSource(""), xerrors.Errorf("unknown log source: %d", logSource) } } + +func convertComputedParameterValue(param parameter.ComputedValue) (*sdkproto.ParameterValue, error) { + var scheme sdkproto.ParameterDestination_Scheme + switch param.DestinationScheme { + case database.ParameterDestinationSchemeEnvironmentVariable: + scheme = sdkproto.ParameterDestination_ENVIRONMENT_VARIABLE + case database.ParameterDestinationSchemeProvisionerVariable: + scheme = sdkproto.ParameterDestination_PROVISIONER_VARIABLE + default: + return nil, xerrors.Errorf("unrecognized destination scheme: %q", param.DestinationScheme) + } + + return &sdkproto.ParameterValue{ + DestinationScheme: scheme, + Name: param.Name, + Value: param.SourceValue, + }, nil +} diff --git a/coderd/provisionerdaemons_test.go b/coderd/provisionerdaemons_test.go index 4c62c7c96e355..b0f1ff54c9b65 100644 --- a/coderd/provisionerdaemons_test.go +++ b/coderd/provisionerdaemons_test.go @@ -11,11 +11,8 @@ import ( ) func TestProvisionerDaemons(t *testing.T) { - // Tests for properly processing specific job - // types should be placed in their respective - // resource location. - // - // eg. project import is a project-related job + // Tests for properly processing specific job types should be placed + // in their respective files. t.Parallel() client := coderdtest.New(t) diff --git a/coderd/provisionerjoblogs.go b/coderd/provisionerjoblogs.go deleted file mode 100644 index f47c7825a300f..0000000000000 --- a/coderd/provisionerjoblogs.go +++ /dev/null @@ -1,196 +0,0 @@ -package coderd - -import ( - "context" - "database/sql" - "encoding/json" - "errors" - "fmt" - "net/http" - "strconv" - "time" - - "github.com/go-chi/render" - "github.com/google/uuid" - - "cdr.dev/slog" - "github.com/coder/coder/database" - "github.com/coder/coder/httpapi" - "github.com/coder/coder/httpmw" -) - -// ProvisionerJobLog represents a single log from a provisioner job. -type ProvisionerJobLog struct { - ID uuid.UUID - CreatedAt time.Time `json:"created_at"` - Source database.LogSource `json:"log_source"` - Level database.LogLevel `json:"log_level"` - Output string `json:"output"` -} - -// Returns provisioner logs based on query parameters. -// The intended usage for a client to stream all logs (with JS API): -// const timestamp = new Date().getTime(); -// 1. GET /logs?before= -// 2. GET /logs?after=&follow -// The combination of these responses should provide all current logs -// to the consumer, and future logs are streamed in the follow request. 
-func (api *api) provisionerJobLogsByID(rw http.ResponseWriter, r *http.Request) { - follow := r.URL.Query().Has("follow") - afterRaw := r.URL.Query().Get("after") - beforeRaw := r.URL.Query().Get("before") - if beforeRaw != "" && follow { - httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ - Message: "before cannot be used with follow", - }) - return - } - - var after time.Time - // Only fetch logs created after the time provided. - if afterRaw != "" { - afterMS, err := strconv.ParseInt(afterRaw, 10, 64) - if err != nil { - httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ - Message: fmt.Sprintf("unable to parse after %q: %s", afterRaw, err), - }) - return - } - after = time.UnixMilli(afterMS) - } else { - if follow { - after = database.Now() - } - } - var before time.Time - // Only fetch logs created before the time provided. - if beforeRaw != "" { - beforeMS, err := strconv.ParseInt(beforeRaw, 10, 64) - if err != nil { - httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ - Message: fmt.Sprintf("unable to parse before %q: %s", beforeRaw, err), - }) - return - } - before = time.UnixMilli(beforeMS) - } else { - before = database.Now() - } - - job := httpmw.ProvisionerJobParam(r) - if !follow { - logs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{ - JobID: job.ID, - CreatedAfter: after, - CreatedBefore: before, - }) - if errors.Is(err, sql.ErrNoRows) { - err = nil - } - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("get provisioner logs: %s", err), - }) - return - } - if logs == nil { - logs = []database.ProvisionerJobLog{} - } - render.Status(r, http.StatusOK) - render.JSON(rw, r, logs) - return - } - - bufferedLogs := make(chan database.ProvisionerJobLog, 128) - closeSubscribe, err := api.Pubsub.Subscribe(provisionerJobLogsChannel(job.ID), func(ctx context.Context, message []byte) { - var logs []database.ProvisionerJobLog - err := json.Unmarshal(message, &logs) - if err != nil { - api.Logger.Warn(r.Context(), fmt.Sprintf("invalid provisioner job log on channel %q: %s", provisionerJobLogsChannel(job.ID), err.Error())) - return - } - - for _, log := range logs { - select { - case bufferedLogs <- log: - default: - // If this overflows users could miss logs streaming. This can happen - // if a database request takes a long amount of time, and we get a lot of logs. - api.Logger.Warn(r.Context(), "provisioner job log overflowing channel") - } - } - }) - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("subscribe to provisioner job logs: %s", err), - }) - return - } - defer closeSubscribe() - - provisionerJobLogs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{ - JobID: job.ID, - CreatedAfter: after, - CreatedBefore: before, - }) - if errors.Is(err, sql.ErrNoRows) { - err = nil - provisionerJobLogs = []database.ProvisionerJobLog{} - } - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprint("get provisioner job logs: %w", err), - }) - return - } - - // "follow" uses the ndjson format to stream data. - // See: https://canjs.com/doc/can-ndjson-stream.html - rw.Header().Set("Content-Type", "application/stream+json") - rw.WriteHeader(http.StatusOK) - rw.(http.Flusher).Flush() - - // The Go stdlib JSON encoder appends a newline character after message write. 
- encoder := json.NewEncoder(rw) - - for _, provisionerJobLog := range provisionerJobLogs { - err = encoder.Encode(convertProvisionerJobLog(provisionerJobLog)) - if err != nil { - return - } - } - - ticker := time.NewTicker(250 * time.Millisecond) - defer ticker.Stop() - for { - select { - case <-r.Context().Done(): - return - case log := <-bufferedLogs: - err = encoder.Encode(convertProvisionerJobLog(log)) - if err != nil { - return - } - rw.(http.Flusher).Flush() - case <-ticker.C: - job, err := api.Database.GetProvisionerJobByID(r.Context(), job.ID) - if err != nil { - api.Logger.Warn(r.Context(), "streaming job logs; checking if completed", slog.Error(err), slog.F("job_id", job.ID.String())) - continue - } - if convertProvisionerJob(job).Status.Completed() { - return - } - } - } -} - -func convertProvisionerJobLog(provisionerJobLog database.ProvisionerJobLog) ProvisionerJobLog { - return ProvisionerJobLog{ - ID: provisionerJobLog.ID, - CreatedAt: provisionerJobLog.CreatedAt, - Source: provisionerJobLog.Source, - Level: provisionerJobLog.Level, - Output: provisionerJobLog.Output, - } -} diff --git a/coderd/provisionerjoblogs_test.go b/coderd/provisionerjoblogs_test.go deleted file mode 100644 index 1667d6feb74d5..0000000000000 --- a/coderd/provisionerjoblogs_test.go +++ /dev/null @@ -1,133 +0,0 @@ -package coderd_test - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/coder/coder/coderd" - "github.com/coder/coder/coderd/coderdtest" - "github.com/coder/coder/database" - "github.com/coder/coder/provisioner/echo" - "github.com/coder/coder/provisionersdk/proto" -) - -func TestProvisionerJobLogsByName(t *testing.T) { - t.Parallel() - t.Run("List", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Parse: echo.ParseComplete, - Provision: []*proto.Provision_Response{{ - Type: &proto.Provision_Response_Log{ - Log: &proto.Log{ - Level: proto.LogLevel_INFO, - Output: "log-output", - }, - }, - }, { - Type: &proto.Provision_Response_Complete{ - Complete: &proto.Provision_Complete{}, - }, - }}, - }) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) - history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ - ProjectVersionID: project.ActiveVersionID, - Transition: database.WorkspaceTransitionStart, - }) - require.NoError(t, err) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID) - // Return the log after completion! 
- logs, err := client.ProvisionerJobLogs(context.Background(), user.Organization, history.ProvisionJobID) - require.NoError(t, err) - require.NotNil(t, logs) - require.Len(t, logs, 1) - }) - - t.Run("StreamAfterComplete", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Parse: echo.ParseComplete, - Provision: []*proto.Provision_Response{{ - Type: &proto.Provision_Response_Log{ - Log: &proto.Log{ - Level: proto.LogLevel_INFO, - Output: "log-output", - }, - }, - }, { - Type: &proto.Provision_Response_Complete{ - Complete: &proto.Provision_Complete{}, - }, - }}, - }) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) - before := time.Now().UTC() - history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ - ProjectVersionID: project.ActiveVersionID, - Transition: database.WorkspaceTransitionStart, - }) - require.NoError(t, err) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, history.ProvisionJobID) - - logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before) - require.NoError(t, err) - log, ok := <-logs - require.True(t, ok) - require.Equal(t, "log-output", log.Output) - // Make sure the channel automatically closes! - _, ok = <-logs - require.False(t, ok) - }) - - t.Run("StreamWhileRunning", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Parse: echo.ParseComplete, - Provision: []*proto.Provision_Response{{ - Type: &proto.Provision_Response_Log{ - Log: &proto.Log{ - Level: proto.LogLevel_INFO, - Output: "log-output", - }, - }, - }, { - Type: &proto.Provision_Response_Complete{ - Complete: &proto.Provision_Complete{}, - }, - }}, - }) - project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) - before := database.Now() - history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ - ProjectVersionID: project.ActiveVersionID, - Transition: database.WorkspaceTransitionStart, - }) - require.NoError(t, err) - logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before) - require.NoError(t, err) - log := <-logs - require.Equal(t, "log-output", log.Output) - // Make sure the channel automatically closes! 
- _, ok := <-logs - require.False(t, ok) - }) -} diff --git a/coderd/provisionerjobs.go b/coderd/provisionerjobs.go index 2eb4d69c34b7d..a99ca905c83c5 100644 --- a/coderd/provisionerjobs.go +++ b/coderd/provisionerjobs.go @@ -1,16 +1,20 @@ package coderd import ( + "context" "database/sql" "encoding/json" "errors" "fmt" "net/http" + "strconv" "time" "github.com/go-chi/render" "github.com/google/uuid" + "cdr.dev/slog" + "github.com/coder/coder/database" "github.com/coder/coder/httpapi" "github.com/coder/coder/httpmw" @@ -44,78 +48,185 @@ type ProvisionerJob struct { WorkerID *uuid.UUID `json:"worker_id,omitempty"` } -type CreateProjectImportJobRequest struct { - StorageMethod database.ProvisionerStorageMethod `json:"storage_method" validate:"oneof=file,required"` - StorageSource string `json:"storage_source" validate:"required"` - Provisioner database.ProvisionerType `json:"provisioner" validate:"oneof=terraform echo,required"` - - AdditionalParameters []ParameterValue `json:"parameter_values"` - SkipParameterSchemas bool `json:"skip_parameter_schemas"` - SkipResources bool `json:"skip_resources"` +// ProvisionerJobLog represents a single log from a provisioner job. +type ProvisionerJobLog struct { + ID uuid.UUID + CreatedAt time.Time `json:"created_at"` + Source database.LogSource `json:"log_source"` + Level database.LogLevel `json:"log_level"` + Output string `json:"output"` } -func (*api) provisionerJobByOrganization(rw http.ResponseWriter, r *http.Request) { +func (*api) provisionerJobByID(rw http.ResponseWriter, r *http.Request) { job := httpmw.ProvisionerJobParam(r) - render.Status(r, http.StatusOK) render.JSON(rw, r, convertProvisionerJob(job)) } -func (api *api) postProvisionerImportJobByOrganization(rw http.ResponseWriter, r *http.Request) { - apiKey := httpmw.APIKey(r) - organization := httpmw.OrganizationParam(r) - var req CreateProjectImportJobRequest - if !httpapi.Read(rw, r, &req) { - return - } - file, err := api.Database.GetFileByHash(r.Context(), req.StorageSource) - if errors.Is(err, sql.ErrNoRows) { +// Returns provisioner logs based on query parameters. +// The intended usage for a client to stream all logs (with JS API): +// const timestamp = new Date().getTime(); +// 1. GET /logs?before= +// 2. GET /logs?after=&follow +// The combination of these responses should provide all current logs +// to the consumer, and future logs are streamed in the follow request. +func (api *api) provisionerJobLogsByID(rw http.ResponseWriter, r *http.Request) { + follow := r.URL.Query().Has("follow") + afterRaw := r.URL.Query().Get("after") + beforeRaw := r.URL.Query().Get("before") + if beforeRaw != "" && follow { httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ - Message: "file not found", + Message: "before cannot be used with follow", }) return } - if err != nil { - httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("get file: %s", err), + + var after time.Time + // Only fetch logs created after the time provided. + if afterRaw != "" { + afterMS, err := strconv.ParseInt(afterRaw, 10, 64) + if err != nil { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: fmt.Sprintf("unable to parse after %q: %s", afterRaw, err), + }) + return + } + after = time.UnixMilli(afterMS) + } else { + if follow { + after = database.Now() + } + } + var before time.Time + // Only fetch logs created before the time provided. 
+ if beforeRaw != "" { + beforeMS, err := strconv.ParseInt(beforeRaw, 10, 64) + if err != nil { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: fmt.Sprintf("unable to parse before %q: %s", beforeRaw, err), + }) + return + } + before = time.UnixMilli(beforeMS) + } else { + before = database.Now() + } + + job := httpmw.ProvisionerJobParam(r) + if !follow { + logs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{ + JobID: job.ID, + CreatedAfter: after, + CreatedBefore: before, }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get provisioner logs: %s", err), + }) + return + } + if logs == nil { + logs = []database.ProvisionerJobLog{} + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, logs) return } - input, err := json.Marshal(projectVersionImportJob{ - // AdditionalParameters: req.AdditionalParameters, - OrganizationID: organization.ID, - SkipParameterSchemas: req.SkipParameterSchemas, - SkipResources: req.SkipResources, + bufferedLogs := make(chan database.ProvisionerJobLog, 128) + closeSubscribe, err := api.Pubsub.Subscribe(provisionerJobLogsChannel(job.ID), func(ctx context.Context, message []byte) { + var logs []database.ProvisionerJobLog + err := json.Unmarshal(message, &logs) + if err != nil { + api.Logger.Warn(r.Context(), fmt.Sprintf("invalid provisioner job log on channel %q: %s", provisionerJobLogsChannel(job.ID), err.Error())) + return + } + + for _, log := range logs { + select { + case bufferedLogs <- log: + default: + // If this overflows users could miss logs streaming. This can happen + // if a database request takes a long amount of time, and we get a lot of logs. + api.Logger.Warn(r.Context(), "provisioner job log overflowing channel") + } + } }) if err != nil { httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("marshal job: %s", err), + Message: fmt.Sprintf("subscribe to provisioner job logs: %s", err), }) return } + defer closeSubscribe() - job, err := api.Database.InsertProvisionerJob(r.Context(), database.InsertProvisionerJobParams{ - ID: uuid.New(), - CreatedAt: database.Now(), - UpdatedAt: database.Now(), - OrganizationID: organization.ID, - InitiatorID: apiKey.UserID, - Provisioner: req.Provisioner, - StorageMethod: database.ProvisionerStorageMethodFile, - StorageSource: file.Hash, - Type: database.ProvisionerJobTypeProjectVersionImport, - Input: input, + provisionerJobLogs, err := api.Database.GetProvisionerLogsByIDBetween(r.Context(), database.GetProvisionerLogsByIDBetweenParams{ + JobID: job.ID, + CreatedAfter: after, + CreatedBefore: before, }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } if err != nil { httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ - Message: fmt.Sprintf("insert provisioner job: %s", err), + Message: fmt.Sprint("get provisioner job logs: %w", err), }) return } - render.Status(r, http.StatusCreated) - render.JSON(rw, r, convertProvisionerJob(job)) + // "follow" uses the ndjson format to stream data. + // See: https://canjs.com/doc/can-ndjson-stream.html + rw.Header().Set("Content-Type", "application/stream+json") + rw.WriteHeader(http.StatusOK) + rw.(http.Flusher).Flush() + + // The Go stdlib JSON encoder appends a newline character after message write. 
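The new handler streams `application/stream+json`: each log is written by the stdlib JSON encoder, which terminates every message with a newline, and the response is flushed after each one. A hedged sketch of the consuming side, assuming only that newline-delimited framing; the URL, organization, and job ID in `main` are placeholders, and `jobLog` is a cut-down stand-in for `coderd.ProvisionerJobLog`:

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

// jobLog is a stand-in with just the fields this sketch prints,
// matching the json tags on the streamed log type.
type jobLog struct {
	Level  string `json:"log_level"`
	Output string `json:"output"`
}

// followLogs issues a follow request and decodes the newline-delimited
// JSON stream until the server closes it, which the handler does once
// the job reaches a completed status.
func followLogs(ctx context.Context, url string) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url+"?follow", nil)
	if err != nil {
		return err
	}
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()

	decoder := json.NewDecoder(res.Body)
	for {
		var entry jobLog
		err = decoder.Decode(&entry)
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		fmt.Printf("[%s] %s\n", entry.Level, entry.Output)
	}
}

func main() {
	// Placeholder endpoint; point this at a real job logs route.
	_ = followLogs(context.Background(), "http://localhost:3000/api/v2/workspaceprovision/org/job-id/logs")
}
```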
+ encoder := json.NewEncoder(rw) + + for _, provisionerJobLog := range provisionerJobLogs { + err = encoder.Encode(convertProvisionerJobLog(provisionerJobLog)) + if err != nil { + return + } + } + + ticker := time.NewTicker(250 * time.Millisecond) + defer ticker.Stop() + for { + select { + case <-r.Context().Done(): + return + case log := <-bufferedLogs: + err = encoder.Encode(convertProvisionerJobLog(log)) + if err != nil { + return + } + rw.(http.Flusher).Flush() + case <-ticker.C: + job, err := api.Database.GetProvisionerJobByID(r.Context(), job.ID) + if err != nil { + api.Logger.Warn(r.Context(), "streaming job logs; checking if completed", slog.Error(err), slog.F("job_id", job.ID.String())) + continue + } + if convertProvisionerJob(job).Status.Completed() { + return + } + } + } +} + +func convertProvisionerJobLog(provisionerJobLog database.ProvisionerJobLog) ProvisionerJobLog { + return ProvisionerJobLog{ + ID: provisionerJobLog.ID, + CreatedAt: provisionerJobLog.CreatedAt, + Source: provisionerJobLog.Source, + Level: provisionerJobLog.Level, + Output: provisionerJobLog.Output, + } } func convertProvisionerJob(provisionerJob database.ProvisionerJob) ProvisionerJob { diff --git a/coderd/provisionerjobs_test.go b/coderd/provisionerjobs_test.go index d16e4e976ba04..8a45d7b97f42e 100644 --- a/coderd/provisionerjobs_test.go +++ b/coderd/provisionerjobs_test.go @@ -2,12 +2,17 @@ package coderd_test import ( "context" + "net/http" "testing" "time" "github.com/stretchr/testify/require" + "github.com/coder/coder/coderd" "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/coderd/parameter" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/database" "github.com/coder/coder/provisioner/echo" "github.com/coder/coder/provisionersdk/proto" ) @@ -20,7 +25,7 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) { user := coderdtest.CreateInitialUser(t, client) _ = coderdtest.NewProvisionerDaemon(t, client) before := time.Now() - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ Parse: []*proto.Parse_Response{{ Type: &proto.Parse_Response_Complete{ Complete: &proto.Parse_Complete{ @@ -39,7 +44,7 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) { }, }}, }) - logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, job.ID, before) + logs, err := client.ProjectImportJobLogsAfter(context.Background(), user.Organization, job.ID, before) require.NoError(t, err) for { log, ok := <-logs @@ -49,4 +54,274 @@ func TestPostProvisionerImportJobByOrganization(t *testing.T) { t.Log(log.Output) } }) + + t.Run("CreateWithParameters", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.NewProvisionerDaemon(t, client) + data, err := echo.Tar(&echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "test", + RedisplayValue: true, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + require.NoError(t, err) + file, err := client.UploadFile(context.Background(), codersdk.ContentTypeTar, data) + require.NoError(t, err) + job, err := client.CreateProjectImportJob(context.Background(), user.Organization, coderd.CreateProjectImportJobRequest{ + StorageSource: file.Hash, + StorageMethod: 
database.ProvisionerStorageMethodFile, + Provisioner: database.ProvisionerTypeEcho, + ParameterValues: []coderd.CreateParameterValueRequest{{ + Name: "test", + SourceValue: "somevalue", + SourceScheme: database.ParameterSourceSchemeData, + DestinationScheme: database.ParameterDestinationSchemeProvisionerVariable, + }}, + }) + require.NoError(t, err) + job = coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + values, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.Equal(t, "somevalue", values[0].SourceValue) + }) +} + +func TestProvisionerJobParametersByID(t *testing.T) { + t.Parallel() + t.Run("NotImported", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + _, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusPreconditionFailed, apiErr.StatusCode()) + }) + + t.Run("List", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "example", + DefaultSource: &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: "hello", + }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + job = coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.Len(t, params, 1) + }) + + t.Run("ListNoRedisplay", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "example", + DefaultSource: &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: "tomato", + }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + RedisplayValue: false, + }}, + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + params, err := client.ProjectImportJobParameters(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + require.Len(t, params, 1) + require.NotNil(t, params[0]) + require.Equal(t, params[0].SourceValue, "") + }) +} + +func TestProvisionerJobResourcesByID(t *testing.T) { + t.Parallel() + t.Run("Something", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: 
[]*proto.Parse_Response{{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: parameter.CoderWorkspaceTransition, + }}, + }, + }, + }}, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{ + Resources: []*proto.Resource{{ + Name: "hello", + Type: "ec2_instance", + }}, + }, + }, + }}, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + resources, err := client.ProjectImportJobResources(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + // One for start, and one for stop! + require.Len(t, resources, 2) + }) +} + +func TestProvisionerJobLogsByName(t *testing.T) { + t.Parallel() + t.Run("List", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: proto.LogLevel_INFO, + Output: "log-output", + }, + }, + }, { + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{}, + }, + }}, + }) + project := coderdtest.CreateProject(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) + history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectVersionID: project.ActiveVersionID, + Transition: database.WorkspaceTransitionStart, + }) + require.NoError(t, err) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, history.ProvisionJobID) + // Return the log after completion! 
+ logs, err := client.WorkspaceProvisionJobLogsBefore(context.Background(), user.Organization, history.ProvisionJobID, time.Time{}) + require.NoError(t, err) + require.NotNil(t, logs) + require.Len(t, logs, 1) + }) + + t.Run("StreamAfterComplete", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: proto.LogLevel_INFO, + Output: "log-output", + }, + }, + }, { + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{}, + }, + }}, + }) + project := coderdtest.CreateProject(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) + before := time.Now().UTC() + history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectVersionID: project.ActiveVersionID, + Transition: database.WorkspaceTransitionStart, + }) + require.NoError(t, err) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, history.ProvisionJobID) + + logs, err := client.WorkspaceProvisionJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before) + require.NoError(t, err) + log, ok := <-logs + require.True(t, ok) + require.Equal(t, "log-output", log.Output) + // Make sure the channel automatically closes! + _, ok = <-logs + require.False(t, ok) + }) + + t.Run("StreamWhileRunning", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: echo.ParseComplete, + Provision: []*proto.Provision_Response{{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: proto.LogLevel_INFO, + Output: "log-output", + }, + }, + }, { + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{}, + }, + }}, + }) + project := coderdtest.CreateProject(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) + before := database.Now() + history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectVersionID: project.ActiveVersionID, + Transition: database.WorkspaceTransitionStart, + }) + require.NoError(t, err) + logs, err := client.WorkspaceProvisionJobLogsAfter(context.Background(), user.Organization, history.ProvisionJobID, before) + require.NoError(t, err) + log := <-logs + require.Equal(t, "log-output", log.Output) + // Make sure the channel automatically closes! 
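The stream tests above depend on the SDK closing the log channel once the job is complete, so a caller never needs its own termination signal. A small illustration of that consumption pattern; the channel is fed by a stand-in goroutine here rather than a real `WorkspaceProvisionJobLogsAfter` call:

```go
package main

import "fmt"

// logEntry is a stand-in for the streamed provisioner job log.
type logEntry struct {
	Level  string
	Output string
}

func main() {
	// In real usage this channel would come from the SDK's follow
	// helper; here a goroutine stands in for the streaming request
	// and closes the channel when the "job" completes.
	logs := make(chan logEntry)
	go func() {
		defer close(logs)
		logs <- logEntry{Level: "INFO", Output: "log-output"}
	}()

	// Because the channel is closed at job completion, a plain range
	// loop terminates without any extra bookkeeping.
	for entry := range logs {
		fmt.Printf("[%s] %s\n", entry.Level, entry.Output)
	}
}
```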
+ _, ok := <-logs + require.False(t, ok) + }) } diff --git a/coderd/workspacehistory_test.go b/coderd/workspacehistory_test.go index 7b6ba0e8cab74..987955b7a336f 100644 --- a/coderd/workspacehistory_test.go +++ b/coderd/workspacehistory_test.go @@ -22,7 +22,7 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) _, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ @@ -40,11 +40,11 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ Provision: []*proto.Provision_Response{{}}, }) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) _, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, @@ -61,9 +61,9 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) closeDaemon := coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) // Close here so workspace history doesn't process! 
closeDaemon.Close() workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) @@ -87,16 +87,16 @@ func TestPostWorkspaceHistoryByUser(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) firstHistory, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, Transition: database.WorkspaceTransitionStart, }) require.NoError(t, err) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, firstHistory.ProvisionJobID) + coderdtest.AwaitWorkspaceProvisionJob(t, client, user.Organization, firstHistory.ProvisionJobID) secondHistory, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, Transition: database.WorkspaceTransitionStart, @@ -117,7 +117,7 @@ func TestWorkspaceHistoryByUser(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) history, err := client.ListWorkspaceHistory(context.Background(), "me", workspace.Name) @@ -131,9 +131,9 @@ func TestWorkspaceHistoryByUser(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) _, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, @@ -152,8 +152,8 @@ func TestWorkspaceHistoryByName(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "me", project.ID) history, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ 
diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index 06f4023e87499..cdc6bc353cde7 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -29,7 +29,7 @@ func TestWorkspaces(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _ = coderdtest.CreateWorkspace(t, client, "", project.ID) workspaces, err := client.Workspaces(context.Background(), "") @@ -58,7 +58,7 @@ func TestPostWorkspaceByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) anotherUser := coderd.CreateUserRequest{ @@ -73,7 +73,7 @@ func TestPostWorkspaceByUser(t *testing.T) { Password: anotherUser.Password, }) require.NoError(t, err) - err = client.SetSessionToken(token.SessionToken) + client.SessionToken = token.SessionToken require.NoError(t, err) _, err = client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{ @@ -90,7 +90,7 @@ func TestPostWorkspaceByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{ @@ -107,7 +107,7 @@ func TestPostWorkspaceByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _ = coderdtest.CreateWorkspace(t, client, "", project.ID) }) @@ -117,7 +117,7 @@ func TestWorkspaceByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.Workspace(context.Background(), "", workspace.Name) @@ -130,7 +130,7 @@ func TestWorkspacesByProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspaces, err := client.WorkspacesByProject(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -141,7 +141,7 @@ func TestWorkspacesByProject(t 
*testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _ = coderdtest.CreateWorkspace(t, client, "", project.ID) workspaces, err := client.WorkspacesByProject(context.Background(), user.Organization, project.Name) diff --git a/codersdk/client.go b/codersdk/client.go index de67ba1ae8058..976c31d06f3da 100644 --- a/codersdk/client.go +++ b/codersdk/client.go @@ -8,7 +8,6 @@ import ( "fmt" "io" "net/http" - "net/http/cookiejar" "net/url" "strings" @@ -28,27 +27,12 @@ func New(serverURL *url.URL) *Client { // Client is an HTTP caller for methods to the Coder API. type Client struct { - URL *url.URL + URL *url.URL + SessionToken string httpClient *http.Client } -// SetSessionToken applies the provided token to the current client. -func (c *Client) SetSessionToken(token string) error { - if c.httpClient.Jar == nil { - var err error - c.httpClient.Jar, err = cookiejar.New(nil) - if err != nil { - return err - } - } - c.httpClient.Jar.SetCookies(c.URL, []*http.Cookie{{ - Name: httpmw.AuthCookie, - Value: token, - }}) - return nil -} - // request performs an HTTP request with the body provided. // The caller is responsible for closing the response body. func (c *Client) request(ctx context.Context, method, path string, body interface{}, opts ...func(r *http.Request)) (*http.Response, error) { @@ -76,6 +60,10 @@ func (c *Client) request(ctx context.Context, method, path string, body interfac if err != nil { return nil, xerrors.Errorf("create request: %w", err) } + req.AddCookie(&http.Cookie{ + Name: httpmw.AuthCookie, + Value: c.SessionToken, + }) if body != nil { req.Header.Set("Content-Type", "application/json") } diff --git a/codersdk/files.go b/codersdk/files.go index 25c7fcb70cc2b..f4fe82f8cd146 100644 --- a/codersdk/files.go +++ b/codersdk/files.go @@ -20,7 +20,7 @@ func (c *Client) UploadFile(ctx context.Context, contentType string, content []b return coderd.UploadFileResponse{}, err } defer res.Body.Close() - if res.StatusCode != http.StatusCreated { + if res.StatusCode != http.StatusCreated && res.StatusCode != http.StatusOK { return coderd.UploadFileResponse{}, readBodyAsError(res) } var resp coderd.UploadFileResponse diff --git a/codersdk/projectimport.go b/codersdk/projectimport.go new file mode 100644 index 0000000000000..95f7cb6812872 --- /dev/null +++ b/codersdk/projectimport.go @@ -0,0 +1,95 @@ +package codersdk + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/google/uuid" + + "github.com/coder/coder/coderd" +) + +// CreateProjectImportJob creates a new import job in the organization provided. +// ProjectImportJob is not associated with a project by default. Projects +// are created from import. 
+func (c *Client) CreateProjectImportJob(ctx context.Context, organization string, req coderd.CreateProjectImportJobRequest) (coderd.ProvisionerJob, error) {
+	res, err := c.request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/projectimport/%s", organization), req)
+	if err != nil {
+		return coderd.ProvisionerJob{}, err
+	}
+	if res.StatusCode != http.StatusCreated {
+		defer res.Body.Close()
+		return coderd.ProvisionerJob{}, readBodyAsError(res)
+	}
+	var job coderd.ProvisionerJob
+	return job, json.NewDecoder(res.Body).Decode(&job)
+}
+
+// ProjectImportJob returns an import job by ID.
+func (c *Client) ProjectImportJob(ctx context.Context, organization string, job uuid.UUID) (coderd.ProvisionerJob, error) {
+	res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projectimport/%s/%s", organization, job), nil)
+	if err != nil {
+		return coderd.ProvisionerJob{}, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != http.StatusOK {
+		return coderd.ProvisionerJob{}, readBodyAsError(res)
+	}
+	var resp coderd.ProvisionerJob
+	return resp, json.NewDecoder(res.Body).Decode(&resp)
+}
+
+// ProjectImportJobLogsBefore returns logs that occurred before a specific time.
+func (c *Client) ProjectImportJobLogsBefore(ctx context.Context, organization string, job uuid.UUID, before time.Time) ([]coderd.ProvisionerJobLog, error) {
+	return c.provisionerJobLogsBefore(ctx, "projectimport", organization, job, before)
+}
+
+// ProjectImportJobLogsAfter streams logs for a project import operation that occurred after a specific time.
+func (c *Client) ProjectImportJobLogsAfter(ctx context.Context, organization string, job uuid.UUID, after time.Time) (<-chan coderd.ProvisionerJobLog, error) {
+	return c.provisionerJobLogsAfter(ctx, "projectimport", organization, job, after)
+}
+
+// ProjectImportJobSchemas returns schemas for an import job by ID.
+func (c *Client) ProjectImportJobSchemas(ctx context.Context, organization string, job uuid.UUID) ([]coderd.ParameterSchema, error) {
+	res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projectimport/%s/%s/schemas", organization, job), nil)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != http.StatusOK {
+		return nil, readBodyAsError(res)
+	}
+	var params []coderd.ParameterSchema
+	return params, json.NewDecoder(res.Body).Decode(&params)
+}
+
+// ProjectImportJobParameters returns computed parameters for a project import job.
+func (c *Client) ProjectImportJobParameters(ctx context.Context, organization string, job uuid.UUID) ([]coderd.ComputedParameterValue, error) {
+	res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projectimport/%s/%s/parameters", organization, job), nil)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close()
+	if res.StatusCode != http.StatusOK {
+		return nil, readBodyAsError(res)
+	}
+	var params []coderd.ComputedParameterValue
+	return params, json.NewDecoder(res.Body).Decode(&params)
+}
+
+// ProjectImportJobResources returns resources for a project import job.
+func (c *Client) ProjectImportJobResources(ctx context.Context, organization string, job uuid.UUID) ([]coderd.ProjectImportJobResource, error) { + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projectimport/%s/%s/resources", organization, job), nil) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, readBodyAsError(res) + } + var resources []coderd.ProjectImportJobResource + return resources, json.NewDecoder(res.Body).Decode(&resources) +} diff --git a/codersdk/projectimport_test.go b/codersdk/projectimport_test.go new file mode 100644 index 0000000000000..8cc6b28a23f6c --- /dev/null +++ b/codersdk/projectimport_test.go @@ -0,0 +1,146 @@ +package codersdk_test + +import ( + "context" + "testing" + "time" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/provisioner/echo" + "github.com/coder/coder/provisionersdk/proto" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestCreateProjectImportJob(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.CreateProjectImportJob(context.Background(), "", coderd.CreateProjectImportJobRequest{}) + require.Error(t, err) + }) + + t.Run("Create", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + _ = coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + }) +} + +func TestProjectImportJob(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJob(context.Background(), "", uuid.New()) + require.Error(t, err) + }) + + t.Run("Get", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) + _, err := client.ProjectImportJob(context.Background(), user.Organization, job.ID) + require.NoError(t, err) + }) +} + +func TestProjectImportJobLogsBefore(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJobLogsBefore(context.Background(), "", uuid.New(), time.Time{}) + require.Error(t, err) + }) + + t.Run("Get", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + before := time.Now() + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Log{ + Log: &proto.Log{ + Output: "hello", + }, + }, + }}, + Provision: echo.ProvisionComplete, + }) + logs, err := client.ProjectImportJobLogsAfter(context.Background(), user.Organization, job.ID, before) + require.NoError(t, err) + <-logs + }) +} + +func TestProjectImportJobLogsAfter(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJobLogsAfter(context.Background(), "", uuid.New(), time.Time{}) + require.Error(t, err) + }) + + t.Run("Get", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + user := coderdtest.CreateInitialUser(t, client) + coderdtest.NewProvisionerDaemon(t, client) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, &echo.Responses{ + 
Parse: []*proto.Parse_Response{{ + Type: &proto.Parse_Response_Log{ + Log: &proto.Log{ + Output: "hello", + }, + }, + }, { + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{}, + }, + }}, + Provision: echo.ProvisionComplete, + }) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) + logs, err := client.ProjectImportJobLogsBefore(context.Background(), user.Organization, job.ID, time.Time{}) + require.NoError(t, err) + require.Len(t, logs, 1) + }) +} + +func TestProjectImportJobSchemas(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJobSchemas(context.Background(), "", uuid.New()) + require.Error(t, err) + }) +} + +func TestProjectImportJobParameters(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJobParameters(context.Background(), "", uuid.New()) + require.Error(t, err) + }) +} + +func TestProjectImportJobResources(t *testing.T) { + t.Parallel() + t.Run("Error", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t) + _, err := client.ProjectImportJobResources(context.Background(), "", uuid.New()) + require.Error(t, err) + }) +} diff --git a/codersdk/projects.go b/codersdk/projects.go index b58825778c922..3e627ae571ed4 100644 --- a/codersdk/projects.go +++ b/codersdk/projects.go @@ -99,20 +99,6 @@ func (c *Client) CreateProjectVersion(ctx context.Context, organization, project return projectVersion, json.NewDecoder(res.Body).Decode(&projectVersion) } -// ProjectVersionParameters returns project parameters for a version by name. -func (c *Client) ProjectVersionParameters(ctx context.Context, organization, project, version string) ([]coderd.ProjectVersionParameter, error) { - res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projects/%s/%s/versions/%s/parameters", organization, project, version), nil) - if err != nil { - return nil, err - } - defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return nil, readBodyAsError(res) - } - var params []coderd.ProjectVersionParameter - return params, json.NewDecoder(res.Body).Decode(¶ms) -} - // ProjectParameters returns parameters scoped to a project. 
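Taken together, the new projectimport client methods form a short workflow: upload a source tarball, create the import job, wait for it to finish, then read back its schemas, computed parameters, and resources. A hedged end-to-end sketch of that flow using the calls shown in this diff; the `codersdk.New` constructor and session-token wiring are assumed to match the client used elsewhere in the changeset, the echo provisioner stands in for a real one, and error handling is kept minimal. Draining the follow-log channel doubles as the wait, since the server closes it when the job completes:

```go
package main

import (
	"context"
	"net/url"
	"time"

	"github.com/coder/coder/coderd"
	"github.com/coder/coder/codersdk"
	"github.com/coder/coder/database"
)

// importProject sketches the client-side flow for the project import
// API: upload source, start the job, wait, then inspect the results.
func importProject(ctx context.Context, serverURL *url.URL, sessionToken, organization string, source []byte) error {
	// Assumed constructor and token wiring; adjust to the real client setup.
	client := codersdk.New(serverURL)
	client.SessionToken = sessionToken

	file, err := client.UploadFile(ctx, codersdk.ContentTypeTar, source)
	if err != nil {
		return err
	}
	job, err := client.CreateProjectImportJob(ctx, organization, coderd.CreateProjectImportJobRequest{
		StorageMethod: database.ProvisionerStorageMethodFile,
		StorageSource: file.Hash,
		Provisioner:   database.ProvisionerTypeEcho,
	})
	if err != nil {
		return err
	}

	// The follow stream is closed by the server once the job reaches a
	// completed status, so draining it doubles as waiting for the job.
	logs, err := client.ProjectImportJobLogsAfter(ctx, organization, job.ID, time.Time{})
	if err != nil {
		return err
	}
	for range logs {
	}

	if _, err = client.ProjectImportJobSchemas(ctx, organization, job.ID); err != nil {
		return err
	}
	if _, err = client.ProjectImportJobParameters(ctx, organization, job.ID); err != nil {
		return err
	}
	_, err = client.ProjectImportJobResources(ctx, organization, job.ID)
	return err
}
```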
func (c *Client) ProjectParameters(ctx context.Context, organization, project string) ([]coderd.ParameterValue, error) { res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projects/%s/%s/parameters", organization, project), nil) diff --git a/codersdk/projects_test.go b/codersdk/projects_test.go index f106d3a42652c..4b5459fbede15 100644 --- a/codersdk/projects_test.go +++ b/codersdk/projects_test.go @@ -43,7 +43,7 @@ func TestProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.Project(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -66,7 +66,7 @@ func TestCreateProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) _ = coderdtest.CreateProject(t, client, user.Organization, job.ID) }) } @@ -84,7 +84,7 @@ func TestProjectVersions(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.ProjectVersions(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -104,7 +104,7 @@ func TestProjectVersion(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.ProjectVersion(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) require.NoError(t, err) @@ -124,7 +124,7 @@ func TestCreateProjectVersion(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProjectVersion(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ ImportJobID: job.ID, @@ -133,28 +133,6 @@ func TestCreateProjectVersion(t *testing.T) { }) } -func TestProjectVersionParameters(t *testing.T) { - t.Parallel() - t.Run("Error", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - _, err := client.ProjectVersionParameters(context.Background(), "some", "project", "version") - require.Error(t, err) - }) - - t.Run("List", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) - project := coderdtest.CreateProject(t, client, user.Organization, 
job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) - _, err := client.ProjectVersionParameters(context.Background(), user.Organization, project.Name, project.ActiveVersionID.String()) - require.NoError(t, err) - }) -} - func TestProjectParameters(t *testing.T) { t.Parallel() t.Run("Error", func(t *testing.T) { @@ -168,7 +146,7 @@ func TestProjectParameters(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.ProjectParameters(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -188,14 +166,13 @@ func TestCreateProjectParameter(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.CreateProjectParameter(context.Background(), user.Organization, project.Name, coderd.CreateParameterValueRequest{ Name: "example", SourceValue: "source-value", SourceScheme: database.ParameterSourceSchemeData, DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, - DestinationValue: "destination-value", }) require.NoError(t, err) }) diff --git a/codersdk/provisioners.go b/codersdk/provisioners.go index 1aea254aeba7a..afef953beabb9 100644 --- a/codersdk/provisioners.go +++ b/codersdk/provisioners.go @@ -20,6 +20,7 @@ import ( "github.com/coder/coder/provisionersdk" ) +// ProvisionerDaemons returns registered provisionerd instances. func (c *Client) ProvisionerDaemons(ctx context.Context) ([]coderd.ProvisionerDaemon, error) { res, err := c.request(ctx, http.MethodGet, "/api/v2/provisioners/daemons", nil) if err != nil { @@ -59,51 +60,15 @@ func (c *Client) ProvisionerDaemonClient(ctx context.Context) (proto.DRPCProvisi return proto.NewDRPCProvisionerDaemonClient(provisionersdk.Conn(session)), nil } -// CreateProjectVersionImportProvisionerJob creates a job for importing -// the provided project version. -func (c *Client) CreateProjectVersionImportProvisionerJob(ctx context.Context, organization string, req coderd.CreateProjectImportJobRequest) (coderd.ProvisionerJob, error) { - res, err := c.request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/provisioners/jobs/%s/import", organization), req) - if err != nil { - return coderd.ProvisionerJob{}, err - } - if res.StatusCode != http.StatusCreated { - defer res.Body.Close() - return coderd.ProvisionerJob{}, readBodyAsError(res) - } - var job coderd.ProvisionerJob - return job, json.NewDecoder(res.Body).Decode(&job) -} - -// ProvisionerJob returns a job by ID. 
-func (c *Client) ProvisionerJob(ctx context.Context, organization string, job uuid.UUID) (coderd.ProvisionerJob, error) { - res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/provisioners/jobs/%s/%s", organization, job), nil) - if err != nil { - return coderd.ProvisionerJob{}, nil - } - defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return coderd.ProvisionerJob{}, readBodyAsError(res) - } - var resp coderd.ProvisionerJob - return resp, json.NewDecoder(res.Body).Decode(&resp) -} - -// ProvisionerJobLogs returns all logs for workspace history. -// To stream logs, use the FollowProvisionerJobLogs function. -func (c *Client) ProvisionerJobLogs(ctx context.Context, organization string, jobID uuid.UUID) ([]coderd.ProvisionerJobLog, error) { - return c.ProvisionerJobLogsBetween(ctx, organization, jobID, time.Time{}, time.Time{}) -} - -// ProvisionerJobLogsBetween returns logs between a specific time. -func (c *Client) ProvisionerJobLogsBetween(ctx context.Context, organization string, jobID uuid.UUID, after, before time.Time) ([]coderd.ProvisionerJobLog, error) { +// provisionerJobLogsBefore provides log output that occurred before a time. +// This is abstracted from a specific job type to provide consistency between +// APIs. Logs is the only shared route between jobs. +func (c *Client) provisionerJobLogsBefore(ctx context.Context, jobType, organization string, job uuid.UUID, before time.Time) ([]coderd.ProvisionerJobLog, error) { values := url.Values{} - if !after.IsZero() { - values["after"] = []string{strconv.FormatInt(after.UTC().UnixMilli(), 10)} - } if !before.IsZero() { values["before"] = []string{strconv.FormatInt(before.UTC().UnixMilli(), 10)} } - res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/provisioners/jobs/%s/%s/logs?%s", organization, jobID, values.Encode()), nil) + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/%s/%s/%s/logs?%s", jobType, organization, job, values.Encode()), nil) if err != nil { return nil, err } @@ -116,14 +81,13 @@ func (c *Client) ProvisionerJobLogsBetween(ctx context.Context, organization str return logs, json.NewDecoder(res.Body).Decode(&logs) } -// FollowProvisionerJobLogsAfter returns a stream of workspace history logs. -// The channel will close when the workspace history job is no longer active. -func (c *Client) FollowProvisionerJobLogsAfter(ctx context.Context, organization string, jobID uuid.UUID, after time.Time) (<-chan coderd.ProvisionerJobLog, error) { +// provisionerJobLogsAfter streams logs that occurred after a specific time. 
+func (c *Client) provisionerJobLogsAfter(ctx context.Context, jobType, organization string, job uuid.UUID, after time.Time) (<-chan coderd.ProvisionerJobLog, error) { afterQuery := "" if !after.IsZero() { afterQuery = fmt.Sprintf("&after=%d", after.UTC().UnixMilli()) } - res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/provisioners/jobs/%s/%s/logs?follow%s", organization, jobID, afterQuery), nil) + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/%s/%s/%s/logs?follow%s", jobType, organization, job, afterQuery), nil) if err != nil { return nil, err } diff --git a/codersdk/provisioners_test.go b/codersdk/provisioners_test.go index 6808dd19a3b78..9bb4528ebec1e 100644 --- a/codersdk/provisioners_test.go +++ b/codersdk/provisioners_test.go @@ -3,16 +3,11 @@ package codersdk_test import ( "context" "testing" - "time" - "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/coder/coder/coderd/coderdtest" - "github.com/coder/coder/database" - "github.com/coder/coder/provisioner/echo" "github.com/coder/coder/provisionerd/proto" - sdkproto "github.com/coder/coder/provisionersdk/proto" ) func TestProvisionerDaemons(t *testing.T) { @@ -49,60 +44,3 @@ func TestProvisionerDaemonClient(t *testing.T) { require.NoError(t, err) }) } -func TestProvisionerJobLogs(t *testing.T) { - t.Parallel() - t.Run("Error", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - _, err := client.ProvisionerJobLogs(context.Background(), "nothing", uuid.New()) - require.Error(t, err) - }) - - t.Run("List", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - _ = coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) - _, err := client.ProvisionerJobLogs(context.Background(), user.Organization, job.ID) - require.NoError(t, err) - }) -} - -func TestFollowProvisionerJobLogsAfter(t *testing.T) { - t.Parallel() - t.Run("Error", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - _, err := client.FollowProvisionerJobLogsAfter(context.Background(), "nothing", uuid.New(), time.Time{}) - require.Error(t, err) - }) - - t.Run("Stream", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t) - user := coderdtest.CreateInitialUser(t, client) - _ = coderdtest.NewProvisionerDaemon(t, client) - before := database.Now() - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, &echo.Responses{ - Parse: []*sdkproto.Parse_Response{{ - Type: &sdkproto.Parse_Response_Log{ - Log: &sdkproto.Log{ - Output: "hello", - }, - }, - }, { - Type: &sdkproto.Parse_Response_Complete{ - Complete: &sdkproto.Parse_Complete{}, - }, - }}, - Provision: echo.ProvisionComplete, - }) - logs, err := client.FollowProvisionerJobLogsAfter(context.Background(), user.Organization, job.ID, before) - require.NoError(t, err) - _, ok := <-logs - require.True(t, ok) - _, ok = <-logs - require.False(t, ok) - }) -} diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index b84efeff628d1..28f926c518049 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -5,6 +5,9 @@ import ( "encoding/json" "fmt" "net/http" + "time" + + "github.com/google/uuid" "github.com/coder/coder/coderd" ) @@ -131,3 +134,26 @@ func (c *Client) CreateWorkspaceHistory(ctx context.Context, owner, workspace st var workspaceHistory coderd.WorkspaceHistory return workspaceHistory, json.NewDecoder(res.Body).Decode(&workspaceHistory) } + 
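// --- Illustrative sketch (not part of this diff): how a caller might use the
// workspace provision job helpers added to codersdk just below. The package
// name, the watchProvisionJob helper, and the use of fmt for output are
// assumptions made for this example only.
package example

import (
	"context"
	"fmt"
	"time"

	"github.com/google/uuid"

	"github.com/coder/coder/codersdk"
)

// watchProvisionJob fetches a workspace provision job once, then streams its
// logs. A zero "after" time requests all logs; the channel is expected to
// close when the server finishes streaming for the job.
func watchProvisionJob(ctx context.Context, client *codersdk.Client, organization string, jobID uuid.UUID) error {
	job, err := client.WorkspaceProvisionJob(ctx, organization, jobID)
	if err != nil {
		return err
	}
	fmt.Printf("provisioner job: %+v\n", job)

	logs, err := client.WorkspaceProvisionJobLogsAfter(ctx, organization, jobID, time.Time{})
	if err != nil {
		return err
	}
	for entry := range logs {
		fmt.Printf("log: %+v\n", entry)
	}
	return nil
}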
+func (c *Client) WorkspaceProvisionJob(ctx context.Context, organization string, job uuid.UUID) (coderd.ProvisionerJob, error) { + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/workspaceprovision/%s/%s", organization, job), nil) + if err != nil { + return coderd.ProvisionerJob{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return coderd.ProvisionerJob{}, readBodyAsError(res) + } + var resp coderd.ProvisionerJob + return resp, json.NewDecoder(res.Body).Decode(&resp) +} + +// WorkspaceProvisionJobLogsBefore returns logs that occurred before a specific time. +func (c *Client) WorkspaceProvisionJobLogsBefore(ctx context.Context, organization string, job uuid.UUID, before time.Time) ([]coderd.ProvisionerJobLog, error) { + return c.provisionerJobLogsBefore(ctx, "workspaceprovision", organization, job, before) +} + +// WorkspaceProvisionJobLogsAfter streams logs for a workspace provision operation that occurred after a specific time. +func (c *Client) WorkspaceProvisionJobLogsAfter(ctx context.Context, organization string, job uuid.UUID, after time.Time) (<-chan coderd.ProvisionerJobLog, error) { + return c.provisionerJobLogsAfter(ctx, "workspaceprovision", organization, job, after) +} diff --git a/codersdk/workspaces_test.go b/codersdk/workspaces_test.go index 39ad8eddf4663..503895243dbcb 100644 --- a/codersdk/workspaces_test.go +++ b/codersdk/workspaces_test.go @@ -42,7 +42,7 @@ func TestWorkspacesByProject(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _, err := client.WorkspacesByProject(context.Background(), user.Organization, project.Name) require.NoError(t, err) @@ -62,7 +62,7 @@ func TestWorkspace(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.Workspace(context.Background(), "", workspace.Name) @@ -83,7 +83,7 @@ func TestListWorkspaceHistory(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.ListWorkspaceHistory(context.Background(), "", workspace.Name) @@ -105,9 +105,9 @@ func TestWorkspaceHistory(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) _ = coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client,
user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, @@ -130,7 +130,7 @@ func TestCreateWorkspace(t *testing.T) { t.Parallel() client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) _ = coderdtest.CreateWorkspace(t, client, "", project.ID) }) @@ -150,9 +150,9 @@ func TestCreateWorkspaceHistory(t *testing.T) { client := coderdtest.New(t) user := coderdtest.CreateInitialUser(t, client) _ = coderdtest.NewProvisionerDaemon(t, client) - job := coderdtest.CreateProjectImportProvisionerJob(t, client, user.Organization, nil) + job := coderdtest.CreateProjectImportJob(t, client, user.Organization, nil) project := coderdtest.CreateProject(t, client, user.Organization, job.ID) - coderdtest.AwaitProvisionerJob(t, client, user.Organization, job.ID) + coderdtest.AwaitProjectImportJob(t, client, user.Organization, job.ID) workspace := coderdtest.CreateWorkspace(t, client, "", project.ID) _, err := client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectVersionID: project.ActiveVersionID, diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index 99793bc7409a9..3af24ad8bb563 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -19,18 +19,19 @@ func New() database.Store { organizationMembers: make([]database.OrganizationMember, 0), users: make([]database.User, 0), - files: make([]database.File, 0), - parameterValue: make([]database.ParameterValue, 0), - parameterSchema: make([]database.ParameterSchema, 0), - project: make([]database.Project, 0), - projectVersion: make([]database.ProjectVersion, 0), - provisionerDaemons: make([]database.ProvisionerDaemon, 0), - provisionerJobs: make([]database.ProvisionerJob, 0), - provisionerJobLog: make([]database.ProvisionerJobLog, 0), - workspace: make([]database.Workspace, 0), - workspaceResource: make([]database.WorkspaceResource, 0), - workspaceHistory: make([]database.WorkspaceHistory, 0), - workspaceAgent: make([]database.WorkspaceAgent, 0), + files: make([]database.File, 0), + parameterValue: make([]database.ParameterValue, 0), + parameterSchema: make([]database.ParameterSchema, 0), + project: make([]database.Project, 0), + projectVersion: make([]database.ProjectVersion, 0), + projectImportJobResource: make([]database.ProjectImportJobResource, 0), + provisionerDaemons: make([]database.ProvisionerDaemon, 0), + provisionerJobs: make([]database.ProvisionerJob, 0), + provisionerJobLog: make([]database.ProvisionerJobLog, 0), + workspace: make([]database.Workspace, 0), + workspaceResource: make([]database.WorkspaceResource, 0), + workspaceHistory: make([]database.WorkspaceHistory, 0), + workspaceAgent: make([]database.WorkspaceAgent, 0), } } @@ -45,18 +46,19 @@ type fakeQuerier struct { users []database.User // New tables - files []database.File - parameterValue []database.ParameterValue - parameterSchema []database.ParameterSchema - project []database.Project - projectVersion []database.ProjectVersion - provisionerDaemons []database.ProvisionerDaemon - provisionerJobs 
[]database.ProvisionerJob - provisionerJobLog []database.ProvisionerJobLog - workspace []database.Workspace - workspaceAgent []database.WorkspaceAgent - workspaceHistory []database.WorkspaceHistory - workspaceResource []database.WorkspaceResource + files []database.File + parameterValue []database.ParameterValue + parameterSchema []database.ParameterSchema + project []database.Project + projectVersion []database.ProjectVersion + projectImportJobResource []database.ProjectImportJobResource + provisionerDaemons []database.ProvisionerDaemon + provisionerJobs []database.ProvisionerJob + provisionerJobLog []database.ProvisionerJobLog + workspace []database.Workspace + workspaceAgent []database.WorkspaceAgent + workspaceHistory []database.WorkspaceHistory + workspaceResource []database.WorkspaceResource } // InTx doesn't rollback data properly for in-memory yet. @@ -399,6 +401,23 @@ func (q *fakeQuerier) GetProjectByOrganizationAndName(_ context.Context, arg dat return database.Project{}, sql.ErrNoRows } +func (q *fakeQuerier) GetProjectImportJobResourcesByJobID(_ context.Context, jobID uuid.UUID) ([]database.ProjectImportJobResource, error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + resources := make([]database.ProjectImportJobResource, 0) + for _, resource := range q.projectImportJobResource { + if resource.JobID.String() != jobID.String() { + continue + } + resources = append(resources, resource) + } + if len(resources) == 0 { + return nil, sql.ErrNoRows + } + return resources, nil +} + func (q *fakeQuerier) GetProjectVersionsByProjectID(_ context.Context, projectID uuid.UUID) ([]database.ProjectVersion, error) { q.mutex.Lock() defer q.mutex.Unlock() @@ -643,7 +662,6 @@ func (q *fakeQuerier) InsertParameterValue(_ context.Context, arg database.Inser SourceScheme: arg.SourceScheme, SourceValue: arg.SourceValue, DestinationScheme: arg.DestinationScheme, - DestinationValue: arg.DestinationValue, } q.parameterValue = append(q.parameterValue, parameterValue) return parameterValue, nil @@ -667,6 +685,23 @@ func (q *fakeQuerier) InsertProject(_ context.Context, arg database.InsertProjec return project, nil } +func (q *fakeQuerier) InsertProjectImportJobResource(_ context.Context, arg database.InsertProjectImportJobResourceParams) (database.ProjectImportJobResource, error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + //nolint:gosimple + projectResource := database.ProjectImportJobResource{ + ID: arg.ID, + CreatedAt: arg.CreatedAt, + JobID: arg.JobID, + Transition: arg.Transition, + Type: arg.Type, + Name: arg.Name, + } + q.projectImportJobResource = append(q.projectImportJobResource, projectResource) + return projectResource, nil +} + func (q *fakeQuerier) InsertProjectVersion(_ context.Context, arg database.InsertProjectVersionParams) (database.ProjectVersion, error) { q.mutex.Lock() defer q.mutex.Unlock() @@ -719,7 +754,6 @@ func (q *fakeQuerier) InsertParameterSchema(_ context.Context, arg database.Inse DefaultSourceValue: arg.DefaultSourceValue, AllowOverrideSource: arg.AllowOverrideSource, DefaultDestinationScheme: arg.DefaultDestinationScheme, - DefaultDestinationValue: arg.DefaultDestinationValue, AllowOverrideDestination: arg.AllowOverrideDestination, DefaultRefresh: arg.DefaultRefresh, RedisplayValue: arg.RedisplayValue, diff --git a/database/dump.sql b/database/dump.sql index c21c963db56f5..d0b1d194b3099 100644 --- a/database/dump.sql +++ b/database/dump.sql @@ -28,6 +28,7 @@ CREATE TYPE parameter_destination_scheme AS ENUM ( CREATE TYPE parameter_scope AS ENUM ( 'organization', 
'project', + 'import_job', 'user', 'workspace' ); @@ -87,7 +88,7 @@ CREATE TABLE api_keys ( ); CREATE TABLE file ( - hash character varying(32) NOT NULL, + hash character varying(64) NOT NULL, created_at timestamp with time zone NOT NULL, created_by text NOT NULL, mimetype character varying(64) NOT NULL, @@ -128,10 +129,9 @@ CREATE TABLE parameter_schema ( name character varying(64) NOT NULL, description character varying(8192) DEFAULT ''::character varying NOT NULL, default_source_scheme parameter_source_scheme, - default_source_value text, + default_source_value text NOT NULL, allow_override_source boolean NOT NULL, default_destination_scheme parameter_destination_scheme, - default_destination_value text, allow_override_destination boolean NOT NULL, default_refresh text NOT NULL, redisplay_value boolean NOT NULL, @@ -150,8 +150,7 @@ CREATE TABLE parameter_value ( scope_id text NOT NULL, source_scheme parameter_source_scheme NOT NULL, source_value text NOT NULL, - destination_scheme parameter_destination_scheme NOT NULL, - destination_value text NOT NULL + destination_scheme parameter_destination_scheme NOT NULL ); CREATE TABLE project ( @@ -164,6 +163,15 @@ CREATE TABLE project ( active_version_id uuid NOT NULL ); +CREATE TABLE project_import_job_resource ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + job_id uuid NOT NULL, + transition workspace_transition NOT NULL, + type character varying(256) NOT NULL, + name character varying(64) NOT NULL +); + CREATE TABLE project_version ( id uuid NOT NULL, project_id uuid NOT NULL, @@ -292,6 +300,9 @@ ALTER TABLE ONLY parameter_value ALTER TABLE ONLY project ADD CONSTRAINT project_id_key UNIQUE (id); +ALTER TABLE ONLY project_import_job_resource + ADD CONSTRAINT project_import_job_resource_id_key UNIQUE (id); + ALTER TABLE ONLY project ADD CONSTRAINT project_organization_id_name_key UNIQUE (organization_id, name); @@ -340,6 +351,9 @@ ALTER TABLE ONLY workspace_resource ALTER TABLE ONLY parameter_schema ADD CONSTRAINT parameter_schema_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_job(id) ON DELETE CASCADE; +ALTER TABLE ONLY project_import_job_resource + ADD CONSTRAINT project_import_job_resource_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_job(id) ON DELETE CASCADE; + ALTER TABLE ONLY project_version ADD CONSTRAINT project_version_project_id_fkey FOREIGN KEY (project_id) REFERENCES project(id); diff --git a/database/migrations/000004_jobs.up.sql b/database/migrations/000004_jobs.up.sql index d2fd0ecdd94cb..4d100e7f6fa80 100644 --- a/database/migrations/000004_jobs.up.sql +++ b/database/migrations/000004_jobs.up.sql @@ -58,6 +58,7 @@ CREATE TABLE IF NOT EXISTS provisioner_job_log ( CREATE TYPE parameter_scope AS ENUM ( 'organization', 'project', + 'import_job', 'user', 'workspace' ); @@ -71,22 +72,6 @@ CREATE TYPE parameter_source_scheme AS ENUM('none', 'data'); -- Supported schemes for a parameter destination. CREATE TYPE parameter_destination_scheme AS ENUM('none', 'environment_variable', 'provisioner_variable'); --- Parameters are provided to jobs for provisioning and to workspaces. 
-CREATE TABLE parameter_value ( - id uuid NOT NULL UNIQUE, - name varchar(64) NOT NULL, - created_at timestamptz NOT NULL, - updated_at timestamptz NOT NULL, - scope parameter_scope NOT NULL, - scope_id text NOT NULL, - source_scheme parameter_source_scheme NOT NULL, - source_value text NOT NULL, - destination_scheme parameter_destination_scheme NOT NULL, - destination_value text NOT NULL, - -- Prevents duplicates for parameters in the same scope. - UNIQUE(name, scope, scope_id) -); - -- Stores project version parameters parsed on import. -- No secrets are stored here. -- @@ -103,12 +88,10 @@ CREATE TABLE parameter_schema ( name varchar(64) NOT NULL, description varchar(8192) NOT NULL DEFAULT '', default_source_scheme parameter_source_scheme, - default_source_value text, + default_source_value text NOT NULL, -- Allows the user to override the source. allow_override_source boolean NOT null, - -- eg. env://SOME_VARIABLE, tfvars://example default_destination_scheme parameter_destination_scheme, - default_destination_value text, -- Allows the user to override the destination. allow_override_destination boolean NOT null, default_refresh text NOT NULL, @@ -121,3 +104,28 @@ CREATE TABLE parameter_schema ( validation_value_type varchar(64) NOT NULL, UNIQUE(job_id, name) ); + +-- Parameters are provided to jobs for provisioning and to workspaces. +CREATE TABLE parameter_value ( + id uuid NOT NULL UNIQUE, + name varchar(64) NOT NULL, + created_at timestamptz NOT NULL, + updated_at timestamptz NOT NULL, + scope parameter_scope NOT NULL, + scope_id text NOT NULL, + source_scheme parameter_source_scheme NOT NULL, + source_value text NOT NULL, + destination_scheme parameter_destination_scheme NOT NULL, + -- Prevents duplicates for parameters in the same scope. + UNIQUE(name, scope, scope_id) +); + +-- Resources for each workspace transition (start and stop) are stored here after a project import job completes.
+CREATE TABLE project_import_job_resource ( + id uuid NOT NULL UNIQUE, + created_at timestamptz NOT NULL, + job_id uuid NOT NULL REFERENCES provisioner_job(id) ON DELETE CASCADE, + transition workspace_transition NOT NULL, + type varchar(256) NOT NULL, + name varchar(64) NOT NULL +); diff --git a/database/models.go b/database/models.go index 04500cffd0005..a0e60915feb23 100644 --- a/database/models.go +++ b/database/models.go @@ -97,6 +97,7 @@ type ParameterScope string const ( ParameterScopeOrganization ParameterScope = "organization" ParameterScopeProject ParameterScope = "project" + ParameterScopeImportJob ParameterScope = "import_job" ParameterScopeUser ParameterScope = "user" ParameterScopeWorkspace ParameterScope = "workspace" ) @@ -307,10 +308,9 @@ type ParameterSchema struct { Name string `db:"name" json:"name"` Description string `db:"description" json:"description"` DefaultSourceScheme ParameterSourceScheme `db:"default_source_scheme" json:"default_source_scheme"` - DefaultSourceValue sql.NullString `db:"default_source_value" json:"default_source_value"` + DefaultSourceValue string `db:"default_source_value" json:"default_source_value"` AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` DefaultDestinationScheme ParameterDestinationScheme `db:"default_destination_scheme" json:"default_destination_scheme"` - DefaultDestinationValue sql.NullString `db:"default_destination_value" json:"default_destination_value"` AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` DefaultRefresh string `db:"default_refresh" json:"default_refresh"` RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` @@ -330,7 +330,6 @@ type ParameterValue struct { SourceScheme ParameterSourceScheme `db:"source_scheme" json:"source_scheme"` SourceValue string `db:"source_value" json:"source_value"` DestinationScheme ParameterDestinationScheme `db:"destination_scheme" json:"destination_scheme"` - DestinationValue string `db:"destination_value" json:"destination_value"` } type Project struct { @@ -343,6 +342,15 @@ type Project struct { ActiveVersionID uuid.UUID `db:"active_version_id" json:"active_version_id"` } +type ProjectImportJobResource struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + JobID uuid.UUID `db:"job_id" json:"job_id"` + Transition WorkspaceTransition `db:"transition" json:"transition"` + Type string `db:"type" json:"type"` + Name string `db:"name" json:"name"` +} + type ProjectVersion struct { ID uuid.UUID `db:"id" json:"id"` ProjectID uuid.UUID `db:"project_id" json:"project_id"` diff --git a/database/querier.go b/database/querier.go index 05fc044eeb95b..93ba57d931e93 100644 --- a/database/querier.go +++ b/database/querier.go @@ -20,6 +20,7 @@ type querier interface { GetParameterValuesByScope(ctx context.Context, arg GetParameterValuesByScopeParams) ([]ParameterValue, error) GetProjectByID(ctx context.Context, id uuid.UUID) (Project, error) GetProjectByOrganizationAndName(ctx context.Context, arg GetProjectByOrganizationAndNameParams) (Project, error) + GetProjectImportJobResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]ProjectImportJobResource, error) GetProjectVersionByID(ctx context.Context, id uuid.UUID) (ProjectVersion, error) GetProjectVersionByProjectIDAndName(ctx context.Context, arg GetProjectVersionByProjectIDAndNameParams) (ProjectVersion, error) GetProjectVersionsByProjectID(ctx context.Context, projectID uuid.UUID) ([]ProjectVersion, 
error) @@ -48,6 +49,7 @@ type querier interface { InsertParameterSchema(ctx context.Context, arg InsertParameterSchemaParams) (ParameterSchema, error) InsertParameterValue(ctx context.Context, arg InsertParameterValueParams) (ParameterValue, error) InsertProject(ctx context.Context, arg InsertProjectParams) (Project, error) + InsertProjectImportJobResource(ctx context.Context, arg InsertProjectImportJobResourceParams) (ProjectImportJobResource, error) InsertProjectVersion(ctx context.Context, arg InsertProjectVersionParams) (ProjectVersion, error) InsertProvisionerDaemon(ctx context.Context, arg InsertProvisionerDaemonParams) (ProvisionerDaemon, error) InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) diff --git a/database/query.sql b/database/query.sql index 26abf1893f36a..2bffab68704d2 100644 --- a/database/query.sql +++ b/database/query.sql @@ -173,6 +173,14 @@ FROM WHERE job_id = $1; +-- name: GetProjectImportJobResourcesByJobID :many +SELECT + * +FROM + project_import_job_resource +WHERE + job_id = $1; + -- name: GetProjectVersionsByProjectID :many SELECT * @@ -408,11 +416,10 @@ INSERT INTO scope_id, source_scheme, source_value, - destination_scheme, - destination_value + destination_scheme ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING *; + ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *; -- name: InsertProject :one INSERT INTO @@ -428,6 +435,12 @@ INSERT INTO VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *; +-- name: InsertProjectImportJobResource :one +INSERT INTO + project_import_job_resource (id, created_at, job_id, transition, type, name) +VALUES + ($1, $2, $3, $4, $5, $6) RETURNING *; + -- name: InsertProjectVersion :one INSERT INTO project_version ( @@ -454,7 +467,6 @@ INSERT INTO default_source_value, allow_override_source, default_destination_scheme, - default_destination_value, allow_override_destination, default_refresh, redisplay_value, @@ -480,8 +492,7 @@ VALUES $13, $14, $15, - $16, - $17 + $16 ) RETURNING *; -- name: InsertProvisionerDaemon :one diff --git a/database/query.sql.go b/database/query.sql.go index 8b63ca467b94b..5b2f3d8bae011 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -271,7 +271,7 @@ func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, userID string const getParameterSchemasByJobID = `-- name: GetParameterSchemasByJobID :many SELECT - id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, default_destination_value, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type + id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type FROM parameter_schema WHERE @@ -297,7 +297,6 @@ func (q *sqlQuerier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid. &i.DefaultSourceValue, &i.AllowOverrideSource, &i.DefaultDestinationScheme, - &i.DefaultDestinationValue, &i.AllowOverrideDestination, &i.DefaultRefresh, &i.RedisplayValue, @@ -321,7 +320,7 @@ func (q *sqlQuerier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid. 
const getParameterValuesByScope = `-- name: GetParameterValuesByScope :many SELECT - id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme, destination_value + id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme FROM parameter_value WHERE @@ -353,7 +352,6 @@ func (q *sqlQuerier) GetParameterValuesByScope(ctx context.Context, arg GetParam &i.SourceScheme, &i.SourceValue, &i.DestinationScheme, - &i.DestinationValue, ); err != nil { return nil, err } @@ -426,6 +424,45 @@ func (q *sqlQuerier) GetProjectByOrganizationAndName(ctx context.Context, arg Ge return i, err } +const getProjectImportJobResourcesByJobID = `-- name: GetProjectImportJobResourcesByJobID :many +SELECT + id, created_at, job_id, transition, type, name +FROM + project_import_job_resource +WHERE + job_id = $1 +` + +func (q *sqlQuerier) GetProjectImportJobResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]ProjectImportJobResource, error) { + rows, err := q.db.QueryContext(ctx, getProjectImportJobResourcesByJobID, jobID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ProjectImportJobResource + for rows.Next() { + var i ProjectImportJobResource + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.JobID, + &i.Transition, + &i.Type, + &i.Name, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getProjectVersionByID = `-- name: GetProjectVersionByID :one SELECT id, project_id, created_at, updated_at, name, description, import_job_id @@ -1378,7 +1415,6 @@ INSERT INTO default_source_value, allow_override_source, default_destination_scheme, - default_destination_value, allow_override_destination, default_refresh, redisplay_value, @@ -1404,9 +1440,8 @@ VALUES $13, $14, $15, - $16, - $17 - ) RETURNING id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, default_destination_value, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type + $16 + ) RETURNING id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type ` type InsertParameterSchemaParams struct { @@ -1416,10 +1451,9 @@ type InsertParameterSchemaParams struct { Name string `db:"name" json:"name"` Description string `db:"description" json:"description"` DefaultSourceScheme ParameterSourceScheme `db:"default_source_scheme" json:"default_source_scheme"` - DefaultSourceValue sql.NullString `db:"default_source_value" json:"default_source_value"` + DefaultSourceValue string `db:"default_source_value" json:"default_source_value"` AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` DefaultDestinationScheme ParameterDestinationScheme `db:"default_destination_scheme" json:"default_destination_scheme"` - DefaultDestinationValue sql.NullString `db:"default_destination_value" json:"default_destination_value"` AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` DefaultRefresh string `db:"default_refresh" json:"default_refresh"` 
RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` @@ -1440,7 +1474,6 @@ func (q *sqlQuerier) InsertParameterSchema(ctx context.Context, arg InsertParame arg.DefaultSourceValue, arg.AllowOverrideSource, arg.DefaultDestinationScheme, - arg.DefaultDestinationValue, arg.AllowOverrideDestination, arg.DefaultRefresh, arg.RedisplayValue, @@ -1460,7 +1493,6 @@ func (q *sqlQuerier) InsertParameterSchema(ctx context.Context, arg InsertParame &i.DefaultSourceValue, &i.AllowOverrideSource, &i.DefaultDestinationScheme, - &i.DefaultDestinationValue, &i.AllowOverrideDestination, &i.DefaultRefresh, &i.RedisplayValue, @@ -1483,11 +1515,10 @@ INSERT INTO scope_id, source_scheme, source_value, - destination_scheme, - destination_value + destination_scheme ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme, destination_value + ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme ` type InsertParameterValueParams struct { @@ -1500,7 +1531,6 @@ type InsertParameterValueParams struct { SourceScheme ParameterSourceScheme `db:"source_scheme" json:"source_scheme"` SourceValue string `db:"source_value" json:"source_value"` DestinationScheme ParameterDestinationScheme `db:"destination_scheme" json:"destination_scheme"` - DestinationValue string `db:"destination_value" json:"destination_value"` } func (q *sqlQuerier) InsertParameterValue(ctx context.Context, arg InsertParameterValueParams) (ParameterValue, error) { @@ -1514,7 +1544,6 @@ func (q *sqlQuerier) InsertParameterValue(ctx context.Context, arg InsertParamet arg.SourceScheme, arg.SourceValue, arg.DestinationScheme, - arg.DestinationValue, ) var i ParameterValue err := row.Scan( @@ -1527,7 +1556,6 @@ func (q *sqlQuerier) InsertParameterValue(ctx context.Context, arg InsertParamet &i.SourceScheme, &i.SourceValue, &i.DestinationScheme, - &i.DestinationValue, ) return i, err } @@ -1580,6 +1608,43 @@ func (q *sqlQuerier) InsertProject(ctx context.Context, arg InsertProjectParams) return i, err } +const insertProjectImportJobResource = `-- name: InsertProjectImportJobResource :one +INSERT INTO + project_import_job_resource (id, created_at, job_id, transition, type, name) +VALUES + ($1, $2, $3, $4, $5, $6) RETURNING id, created_at, job_id, transition, type, name +` + +type InsertProjectImportJobResourceParams struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + JobID uuid.UUID `db:"job_id" json:"job_id"` + Transition WorkspaceTransition `db:"transition" json:"transition"` + Type string `db:"type" json:"type"` + Name string `db:"name" json:"name"` +} + +func (q *sqlQuerier) InsertProjectImportJobResource(ctx context.Context, arg InsertProjectImportJobResourceParams) (ProjectImportJobResource, error) { + row := q.db.QueryRowContext(ctx, insertProjectImportJobResource, + arg.ID, + arg.CreatedAt, + arg.JobID, + arg.Transition, + arg.Type, + arg.Name, + ) + var i ProjectImportJobResource + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.JobID, + &i.Transition, + &i.Type, + &i.Name, + ) + return i, err +} + const insertProjectVersion = `-- name: InsertProjectVersion :one INSERT INTO project_version ( diff --git a/go.mod b/go.mod index ffd3165824c4a..7a692c6352a8e 100644 --- a/go.mod +++ b/go.mod @@ -11,6 +11,9 @@ replace github.com/hashicorp/terraform-exec => github.com/kylecarbs/terraform-ex // Required 
until https://github.com/hashicorp/terraform-config-inspect/pull/74 is merged. replace github.com/hashicorp/terraform-config-inspect => github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88 +// Required until https://github.com/chzyer/readline/pull/198 is merged. +replace github.com/chzyer/readline => github.com/kylecarbs/readline v0.0.0-20220211054233-0d62993714c8 + require ( cdr.dev/slog v1.4.1 github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 @@ -42,6 +45,7 @@ require ( github.com/spf13/cobra v1.3.0 github.com/stretchr/testify v1.7.0 github.com/unrolled/secure v1.0.9 + github.com/xlab/treeprint v1.1.0 go.uber.org/atomic v1.9.0 go.uber.org/goleak v1.1.12 golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838 diff --git a/go.sum b/go.sum index d14ed9f0057d8..231783ed456a6 100644 --- a/go.sum +++ b/go.sum @@ -212,8 +212,6 @@ github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d8 github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= @@ -843,6 +841,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4= github.com/kylecarbs/promptui v0.8.1-0.20201231190244-d8f2159af2b2 h1:MUREBTh4kybLY1KyuBfSx+QPfTB8XiUHs6ZxUhOPTnU= github.com/kylecarbs/promptui v0.8.1-0.20201231190244-d8f2159af2b2/go.mod h1:n4zTdgP0vr0S3w7/O/g98U+e0gwLScEXGwov2nIKuGQ= +github.com/kylecarbs/readline v0.0.0-20220211054233-0d62993714c8 h1:Y7O3Z3YeNRtw14QrtHpevU4dSjCkov0J40MtQ7Nc0n8= +github.com/kylecarbs/readline v0.0.0-20220211054233-0d62993714c8/go.mod h1:n/KX1BZoN1m9EwoXkn/xAV4fd3k8c++gGBsgLONaPOY= github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88 h1:tvG/qs5c4worwGyGnbbb4i/dYYLjpFwDMqcIT3awAf8= github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88/go.mod h1:Z0Nnk4+3Cy89smEbrq+sl1bxc9198gIP4I7wcQF6Kqs= github.com/kylecarbs/terraform-exec v0.15.1-0.20220202050609-a1ce7181b180 h1:yafC0pmxjs18fnO5RdKFLSItJIjYwGfSHTfcUvlZb3E= @@ -1240,6 +1240,8 @@ github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk= +github.com/xlab/treeprint v1.1.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= 
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index c0ab765c1bf67..dcdf77fa3a874 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -44,8 +44,11 @@ func convertVariableToParameter(variable *tfconfig.Variable) (*proto.ParameterSc schema := &proto.ParameterSchema{ Name: variable.Name, Description: variable.Description, - RedisplayValue: variable.Sensitive, + RedisplayValue: !variable.Sensitive, ValidationValueType: variable.Type, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, } if variable.Default != nil { @@ -57,10 +60,6 @@ func convertVariableToParameter(variable *tfconfig.Variable) (*proto.ParameterSc Scheme: proto.ParameterSource_DATA, Value: string(defaultData), } - schema.DefaultDestination = &proto.ParameterDestination{ - Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, - Value: variable.Name, - } } if len(variable.Validations) > 0 && variable.Validations[0].Condition != nil { diff --git a/provisioner/terraform/parse_test.go b/provisioner/terraform/parse_test.go index f1221de20e705..f77882db6b9bc 100644 --- a/provisioner/terraform/parse_test.go +++ b/provisioner/terraform/parse_test.go @@ -52,8 +52,12 @@ func TestParse(t *testing.T) { Type: &proto.Parse_Response_Complete{ Complete: &proto.Parse_Complete{ ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", - Description: "Testing!", + Name: "A", + RedisplayValue: true, + Description: "Testing!", + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, }}, }, }, @@ -69,14 +73,14 @@ func TestParse(t *testing.T) { Type: &proto.Parse_Response_Complete{ Complete: &proto.Parse_Complete{ ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", + Name: "A", + RedisplayValue: true, DefaultSource: &proto.ParameterSource{ Scheme: proto.ParameterSource_DATA, Value: "\"wow\"", }, DefaultDestination: &proto.ParameterDestination{ Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, - Value: "A", }, }}, }, @@ -96,10 +100,13 @@ func TestParse(t *testing.T) { Complete: &proto.Parse_Complete{ ParameterSchemas: []*proto.ParameterSchema{{ Name: "A", + RedisplayValue: true, ValidationCondition: `var.A == "value"`, ValidationTypeSystem: proto.ParameterSchema_HCL, - }, - }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + }, + }}, }, }, }, diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index d1cd2548b19fb..1c3db72fd341b 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ b/provisionerd/proto/provisionerd.pb.go @@ -283,66 +283,6 @@ func (x *CancelledJob) GetError() string { return "" } -// TransitionedResource represents a resource that knows whether -// it's existence is dependent on stop or not. -// -// This is used on import to display start + stopped resources -// for the lifecycle of a workspace. 
-type TransitionedResource struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Resource *proto.Resource `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` - DestroyOnStop bool `protobuf:"varint,2,opt,name=destroy_on_stop,json=destroyOnStop,proto3" json:"destroy_on_stop,omitempty"` -} - -func (x *TransitionedResource) Reset() { - *x = TransitionedResource{} - if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *TransitionedResource) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*TransitionedResource) ProtoMessage() {} - -func (x *TransitionedResource) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use TransitionedResource.ProtoReflect.Descriptor instead. -func (*TransitionedResource) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3} -} - -func (x *TransitionedResource) GetResource() *proto.Resource { - if x != nil { - return x.Resource - } - return nil -} - -func (x *TransitionedResource) GetDestroyOnStop() bool { - if x != nil { - return x.DestroyOnStop - } - return false -} - // CompletedJob is sent when the provisioner daemon completes a job. type CompletedJob struct { state protoimpl.MessageState @@ -359,7 +299,7 @@ type CompletedJob struct { func (x *CompletedJob) Reset() { *x = CompletedJob{} if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -372,7 +312,7 @@ func (x *CompletedJob) String() string { func (*CompletedJob) ProtoMessage() {} func (x *CompletedJob) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -385,7 +325,7 @@ func (x *CompletedJob) ProtoReflect() protoreflect.Message { // Deprecated: Use CompletedJob.ProtoReflect.Descriptor instead. 
func (*CompletedJob) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4} + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3} } func (x *CompletedJob) GetJobId() string { @@ -447,7 +387,7 @@ type Log struct { func (x *Log) Reset() { *x = Log{} if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -460,7 +400,7 @@ func (x *Log) String() string { func (*Log) ProtoMessage() {} func (x *Log) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -473,7 +413,7 @@ func (x *Log) ProtoReflect() protoreflect.Message { // Deprecated: Use Log.ProtoReflect.Descriptor instead. func (*Log) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{5} + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4} } func (x *Log) GetSource() LogSource { @@ -504,35 +444,34 @@ func (x *Log) GetOutput() string { return "" } -// JobUpdate represents an update to a job. -// There may be no log output, but this message -// should still be sent periodically as a heartbeat. -type JobUpdate struct { +// This message should be sent periodically as a heartbeat. +type UpdateJobRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` - Logs []*Log `protobuf:"bytes,2,rep,name=logs,proto3" json:"logs,omitempty"` + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + Logs []*Log `protobuf:"bytes,2,rep,name=logs,proto3" json:"logs,omitempty"` + ParameterSchemas []*proto.ParameterSchema `protobuf:"bytes,3,rep,name=parameter_schemas,json=parameterSchemas,proto3" json:"parameter_schemas,omitempty"` } -func (x *JobUpdate) Reset() { - *x = JobUpdate{} +func (x *UpdateJobRequest) Reset() { + *x = UpdateJobRequest{} if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } -func (x *JobUpdate) String() string { +func (x *UpdateJobRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*JobUpdate) ProtoMessage() {} +func (*UpdateJobRequest) ProtoMessage() {} -func (x *JobUpdate) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] +func (x *UpdateJobRequest) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -543,25 +482,81 @@ func (x *JobUpdate) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use JobUpdate.ProtoReflect.Descriptor instead. -func (*JobUpdate) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{6} +// Deprecated: Use UpdateJobRequest.ProtoReflect.Descriptor instead. 
+func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{5} } -func (x *JobUpdate) GetJobId() string { +func (x *UpdateJobRequest) GetJobId() string { if x != nil { return x.JobId } return "" } -func (x *JobUpdate) GetLogs() []*Log { +func (x *UpdateJobRequest) GetLogs() []*Log { if x != nil { return x.Logs } return nil } +func (x *UpdateJobRequest) GetParameterSchemas() []*proto.ParameterSchema { + if x != nil { + return x.ParameterSchemas + } + return nil +} + +type UpdateJobResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // If parameter schemas are sent, the job will respond + // with resolved parameter values. + ParameterValues []*proto.ParameterValue `protobuf:"bytes,1,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty"` +} + +func (x *UpdateJobResponse) Reset() { + *x = UpdateJobResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UpdateJobResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateJobResponse) ProtoMessage() {} + +func (x *UpdateJobResponse) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateJobResponse.ProtoReflect.Descriptor instead. +func (*UpdateJobResponse) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{6} +} + +func (x *UpdateJobResponse) GetParameterValues() []*proto.ParameterValue { + if x != nil { + return x.ParameterValues + } + return nil +} + type AcquiredJob_WorkspaceProvision struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -637,10 +632,6 @@ type AcquiredJob_ProjectImport struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - - ParameterValues []*proto.ParameterValue `protobuf:"bytes,1,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty"` - SkipParameterSchemas bool `protobuf:"varint,2,opt,name=skip_parameter_schemas,json=skipParameterSchemas,proto3" json:"skip_parameter_schemas,omitempty"` - SkipResources bool `protobuf:"varint,3,opt,name=skip_resources,json=skipResources,proto3" json:"skip_resources,omitempty"` } func (x *AcquiredJob_ProjectImport) Reset() { @@ -675,27 +666,6 @@ func (*AcquiredJob_ProjectImport) Descriptor() ([]byte, []int) { return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1, 1} } -func (x *AcquiredJob_ProjectImport) GetParameterValues() []*proto.ParameterValue { - if x != nil { - return x.ParameterValues - } - return nil -} - -func (x *AcquiredJob_ProjectImport) GetSkipParameterSchemas() bool { - if x != nil { - return x.SkipParameterSchemas - } - return false -} - -func (x *AcquiredJob_ProjectImport) GetSkipResources() bool { - if x != nil { - return x.SkipResources - } - return false -} - type CompletedJob_WorkspaceProvision struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -734,7 +704,7 @@ func (x *CompletedJob_WorkspaceProvision) ProtoReflect() protoreflect.Message { // 
Deprecated: Use CompletedJob_WorkspaceProvision.ProtoReflect.Descriptor instead. func (*CompletedJob_WorkspaceProvision) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 0} + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3, 0} } func (x *CompletedJob_WorkspaceProvision) GetState() []byte { @@ -756,9 +726,8 @@ type CompletedJob_ProjectImport struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ParameterSchemas []*proto.ParameterSchema `protobuf:"bytes,1,rep,name=parameter_schemas,json=parameterSchemas,proto3" json:"parameter_schemas,omitempty"` - StartResources []*proto.Resource `protobuf:"bytes,2,rep,name=start_resources,json=startResources,proto3" json:"start_resources,omitempty"` - StopResources []*proto.Resource `protobuf:"bytes,3,rep,name=stop_resources,json=stopResources,proto3" json:"stop_resources,omitempty"` + StartResources []*proto.Resource `protobuf:"bytes,1,rep,name=start_resources,json=startResources,proto3" json:"start_resources,omitempty"` + StopResources []*proto.Resource `protobuf:"bytes,2,rep,name=stop_resources,json=stopResources,proto3" json:"stop_resources,omitempty"` } func (x *CompletedJob_ProjectImport) Reset() { @@ -790,14 +759,7 @@ func (x *CompletedJob_ProjectImport) ProtoReflect() protoreflect.Message { // Deprecated: Use CompletedJob_ProjectImport.ProtoReflect.Descriptor instead. func (*CompletedJob_ProjectImport) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 1} -} - -func (x *CompletedJob_ProjectImport) GetParameterSchemas() []*proto.ParameterSchema { - if x != nil { - return x.ParameterSchemas - } - return nil + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3, 1} } func (x *CompletedJob_ProjectImport) GetStartResources() []*proto.Resource { @@ -823,7 +785,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x1a, 0x26, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x07, 0x0a, - 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0xf8, 0x05, 0x0a, 0x0b, 0x41, 0x63, 0x71, 0x75, 0x69, + 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0xd2, 0x04, 0x0a, 0x0b, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, @@ -859,102 +821,92 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x1a, 0xb4, 0x01, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, - 0x70, 0x6f, 0x72, 0x74, 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 
0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x34, 0x0a, 0x16, - 0x73, 0x6b, 0x69, 0x70, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x73, 0x6b, - 0x69, 0x70, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x6b, 0x69, 0x70, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x73, 0x6b, 0x69, 0x70, - 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x22, 0x3b, 0x0a, 0x0c, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, - 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x71, - 0x0a, 0x14, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x65, 0x64, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, - 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x26, 0x0a, 0x0f, 0x64, 0x65, 0x73, - 0x74, 0x72, 0x6f, 0x79, 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x6f, 0x70, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x0d, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x4f, 0x6e, 0x53, 0x74, 0x6f, - 0x70, 0x22, 0x9e, 0x04, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, - 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x60, 0x0a, 0x13, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, - 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x51, 0x0a, 0x0e, 0x70, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, - 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, - 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x5f, - 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 
0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, - 0xd8, 0x01, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x3e, 0x0a, 0x0f, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0e, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x0e, - 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, + 0x74, 0x65, 0x1a, 0x0f, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x3b, 0x0a, 0x0c, 0x43, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, + 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, + 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xd3, 0x03, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, + 0x12, 0x60, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x51, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x5f, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 
0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, 0x73, 0x74, 0x6f, - 0x70, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x22, 0x9a, 0x01, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x6c, - 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, - 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, - 0x49, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, - 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, - 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, - 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x2a, 0x34, 0x0a, 0x09, 0x4c, 0x6f, - 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, 0x56, 0x49, - 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x00, 0x12, - 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x01, - 0x32, 0x8c, 0x02, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, - 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, - 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x3b, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, - 0x62, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, - 0x2e, 0x4a, 0x6f, 0x62, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, - 0x01, 0x12, 0x3c, 0x0a, 0x09, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, - 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, - 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, - 0x6d, 
0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, - 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x8d, 0x01, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, + 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x3e, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x70, + 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, 0x73, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9a, + 0x01, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, + 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, + 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, + 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x64, 0x41, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x9b, 0x01, 0x0a, 0x10, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x49, + 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x22, 0x5b, 0x0a, 0x11, 0x55, 0x70, 0x64, + 0x61, 
0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x46, + 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2a, 0x34, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, + 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, + 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x01, 0x32, 0x9d, 0x02, 0x0a, + 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, + 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, + 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, + 0x12, 0x4c, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1e, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, + 0x0a, 0x09, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, + 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( @@ -976,47 +928,46 @@ var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (*Empty)(nil), // 1: provisionerd.Empty (*AcquiredJob)(nil), // 2: provisionerd.AcquiredJob (*CancelledJob)(nil), // 3: provisionerd.CancelledJob - (*TransitionedResource)(nil), // 4: provisionerd.TransitionedResource - (*CompletedJob)(nil), // 5: provisionerd.CompletedJob - (*Log)(nil), // 6: provisionerd.Log - (*JobUpdate)(nil), // 7: provisionerd.JobUpdate + (*CompletedJob)(nil), // 4: provisionerd.CompletedJob + (*Log)(nil), // 5: provisionerd.Log + (*UpdateJobRequest)(nil), // 6: 
provisionerd.UpdateJobRequest + (*UpdateJobResponse)(nil), // 7: provisionerd.UpdateJobResponse (*AcquiredJob_WorkspaceProvision)(nil), // 8: provisionerd.AcquiredJob.WorkspaceProvision (*AcquiredJob_ProjectImport)(nil), // 9: provisionerd.AcquiredJob.ProjectImport (*CompletedJob_WorkspaceProvision)(nil), // 10: provisionerd.CompletedJob.WorkspaceProvision (*CompletedJob_ProjectImport)(nil), // 11: provisionerd.CompletedJob.ProjectImport - (*proto.Resource)(nil), // 12: provisioner.Resource - (proto.LogLevel)(0), // 13: provisioner.LogLevel + (proto.LogLevel)(0), // 12: provisioner.LogLevel + (*proto.ParameterSchema)(nil), // 13: provisioner.ParameterSchema (*proto.ParameterValue)(nil), // 14: provisioner.ParameterValue - (*proto.ParameterSchema)(nil), // 15: provisioner.ParameterSchema + (*proto.Resource)(nil), // 15: provisioner.Resource } var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ 8, // 0: provisionerd.AcquiredJob.workspace_provision:type_name -> provisionerd.AcquiredJob.WorkspaceProvision 9, // 1: provisionerd.AcquiredJob.project_import:type_name -> provisionerd.AcquiredJob.ProjectImport - 12, // 2: provisionerd.TransitionedResource.resource:type_name -> provisioner.Resource - 10, // 3: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision - 11, // 4: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport - 0, // 5: provisionerd.Log.source:type_name -> provisionerd.LogSource - 13, // 6: provisionerd.Log.level:type_name -> provisioner.LogLevel - 6, // 7: provisionerd.JobUpdate.logs:type_name -> provisionerd.Log - 14, // 8: provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue - 14, // 9: provisionerd.AcquiredJob.ProjectImport.parameter_values:type_name -> provisioner.ParameterValue - 12, // 10: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource - 15, // 11: provisionerd.CompletedJob.ProjectImport.parameter_schemas:type_name -> provisioner.ParameterSchema - 12, // 12: provisionerd.CompletedJob.ProjectImport.start_resources:type_name -> provisioner.Resource - 12, // 13: provisionerd.CompletedJob.ProjectImport.stop_resources:type_name -> provisioner.Resource - 1, // 14: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty - 7, // 15: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate - 3, // 16: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob - 5, // 17: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob - 2, // 18: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob - 1, // 19: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty - 1, // 20: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty - 1, // 21: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty - 18, // [18:22] is the sub-list for method output_type - 14, // [14:18] is the sub-list for method input_type - 14, // [14:14] is the sub-list for extension type_name - 14, // [14:14] is the sub-list for extension extendee - 0, // [0:14] is the sub-list for field type_name + 10, // 2: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision + 11, // 3: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport + 0, // 4: 
provisionerd.Log.source:type_name -> provisionerd.LogSource + 12, // 5: provisionerd.Log.level:type_name -> provisioner.LogLevel + 5, // 6: provisionerd.UpdateJobRequest.logs:type_name -> provisionerd.Log + 13, // 7: provisionerd.UpdateJobRequest.parameter_schemas:type_name -> provisioner.ParameterSchema + 14, // 8: provisionerd.UpdateJobResponse.parameter_values:type_name -> provisioner.ParameterValue + 14, // 9: provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue + 15, // 10: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource + 15, // 11: provisionerd.CompletedJob.ProjectImport.start_resources:type_name -> provisioner.Resource + 15, // 12: provisionerd.CompletedJob.ProjectImport.stop_resources:type_name -> provisioner.Resource + 1, // 13: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty + 6, // 14: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.UpdateJobRequest + 3, // 15: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob + 4, // 16: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob + 2, // 17: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob + 7, // 18: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.UpdateJobResponse + 1, // 19: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty + 1, // 20: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty + 17, // [17:21] is the sub-list for method output_type + 13, // [13:17] is the sub-list for method input_type + 13, // [13:13] is the sub-list for extension type_name + 13, // [13:13] is the sub-list for extension extendee + 0, // [0:13] is the sub-list for field type_name } func init() { file_provisionerd_proto_provisionerd_proto_init() } @@ -1062,7 +1013,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { } } file_provisionerd_proto_provisionerd_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TransitionedResource); i { + switch v := v.(*CompletedJob); i { case 0: return &v.state case 1: @@ -1074,7 +1025,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { } } file_provisionerd_proto_provisionerd_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CompletedJob); i { + switch v := v.(*Log); i { case 0: return &v.state case 1: @@ -1086,7 +1037,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { } } file_provisionerd_proto_provisionerd_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Log); i { + switch v := v.(*UpdateJobRequest); i { case 0: return &v.state case 1: @@ -1098,7 +1049,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { } } file_provisionerd_proto_provisionerd_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*JobUpdate); i { + switch v := v.(*UpdateJobResponse); i { case 0: return &v.state case 1: @@ -1162,7 +1113,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { (*AcquiredJob_WorkspaceProvision_)(nil), (*AcquiredJob_ProjectImport_)(nil), } - file_provisionerd_proto_provisionerd_proto_msgTypes[4].OneofWrappers = []interface{}{ + file_provisionerd_proto_provisionerd_proto_msgTypes[3].OneofWrappers = []interface{}{ (*CompletedJob_WorkspaceProvision_)(nil), (*CompletedJob_ProjectImport_)(nil), } diff --git 
a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index 35c7298693f77..64a37bd3e05ef 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -18,9 +18,6 @@ message AcquiredJob { bytes state = 4; } message ProjectImport { - repeated provisioner.ParameterValue parameter_values = 1; - bool skip_parameter_schemas = 2; - bool skip_resources = 3; } string job_id = 1; int64 created_at = 2; @@ -38,16 +35,6 @@ message CancelledJob { string error = 2; } -// TransitionedResource represents a resource that knows whether -// it's existence is dependent on stop or not. -// -// This is used on import to display start + stopped resources -// for the lifecycle of a workspace. -message TransitionedResource { - provisioner.Resource resource = 1; - bool destroy_on_stop = 2; -} - // CompletedJob is sent when the provisioner daemon completes a job. message CompletedJob { message WorkspaceProvision { @@ -55,9 +42,8 @@ message CompletedJob { repeated provisioner.Resource resources = 2; } message ProjectImport { - repeated provisioner.ParameterSchema parameter_schemas = 1; - repeated provisioner.Resource start_resources = 2; - repeated provisioner.Resource stop_resources = 3; + repeated provisioner.Resource start_resources = 1; + repeated provisioner.Resource stop_resources = 2; } string job_id = 1; oneof type { @@ -80,12 +66,17 @@ message Log { string output = 4; } -// JobUpdate represents an update to a job. -// There may be no log output, but this message -// should still be sent periodically as a heartbeat. -message JobUpdate { +// This message should be sent periodically as a heartbeat. +message UpdateJobRequest { string job_id = 1; repeated Log logs = 2; + repeated provisioner.ParameterSchema parameter_schemas = 3; +} + +message UpdateJobResponse { + // If parameter schemas are sent, the job will respond + // with resolved parameter values. + repeated provisioner.ParameterValue parameter_values = 1; } service ProvisionerDaemon { @@ -97,8 +88,8 @@ service ProvisionerDaemon { // UpdateJob streams periodic updates for a job. // Implementations should buffer logs so this stream // is non-blocking. - rpc UpdateJob(stream JobUpdate) returns (Empty); - + rpc UpdateJob(UpdateJobRequest) returns (UpdateJobResponse); + // CancelJob indicates a job has been cancelled with // an error message. 
rpc CancelJob(CancelledJob) returns (Empty); diff --git a/provisionerd/proto/provisionerd_drpc.pb.go b/provisionerd/proto/provisionerd_drpc.pb.go index 10ade5583bc32..6e5a116239df3 100644 --- a/provisionerd/proto/provisionerd_drpc.pb.go +++ b/provisionerd/proto/provisionerd_drpc.pb.go @@ -39,7 +39,7 @@ type DRPCProvisionerDaemonClient interface { DRPCConn() drpc.Conn AcquireJob(ctx context.Context, in *Empty) (*AcquiredJob, error) - UpdateJob(ctx context.Context) (DRPCProvisionerDaemon_UpdateJobClient, error) + UpdateJob(ctx context.Context, in *UpdateJobRequest) (*UpdateJobResponse, error) CancelJob(ctx context.Context, in *CancelledJob) (*Empty, error) CompleteJob(ctx context.Context, in *CompletedJob) (*Empty, error) } @@ -63,45 +63,13 @@ func (c *drpcProvisionerDaemonClient) AcquireJob(ctx context.Context, in *Empty) return out, nil } -func (c *drpcProvisionerDaemonClient) UpdateJob(ctx context.Context) (DRPCProvisionerDaemon_UpdateJobClient, error) { - stream, err := c.cc.NewStream(ctx, "/provisionerd.ProvisionerDaemon/UpdateJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) +func (c *drpcProvisionerDaemonClient) UpdateJob(ctx context.Context, in *UpdateJobRequest) (*UpdateJobResponse, error) { + out := new(UpdateJobResponse) + err := c.cc.Invoke(ctx, "/provisionerd.ProvisionerDaemon/UpdateJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, in, out) if err != nil { return nil, err } - x := &drpcProvisionerDaemon_UpdateJobClient{stream} - return x, nil -} - -type DRPCProvisionerDaemon_UpdateJobClient interface { - drpc.Stream - Send(*JobUpdate) error - CloseAndRecv() (*Empty, error) -} - -type drpcProvisionerDaemon_UpdateJobClient struct { - drpc.Stream -} - -func (x *drpcProvisionerDaemon_UpdateJobClient) Send(m *JobUpdate) error { - return x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) -} - -func (x *drpcProvisionerDaemon_UpdateJobClient) CloseAndRecv() (*Empty, error) { - if err := x.CloseSend(); err != nil { - return nil, err - } - m := new(Empty) - if err := x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { - return nil, err - } - return m, nil -} - -func (x *drpcProvisionerDaemon_UpdateJobClient) CloseAndRecvMsg(m *Empty) error { - if err := x.CloseSend(); err != nil { - return err - } - return x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) + return out, nil } func (c *drpcProvisionerDaemonClient) CancelJob(ctx context.Context, in *CancelledJob) (*Empty, error) { @@ -124,7 +92,7 @@ func (c *drpcProvisionerDaemonClient) CompleteJob(ctx context.Context, in *Compl type DRPCProvisionerDaemonServer interface { AcquireJob(context.Context, *Empty) (*AcquiredJob, error) - UpdateJob(DRPCProvisionerDaemon_UpdateJobStream) error + UpdateJob(context.Context, *UpdateJobRequest) (*UpdateJobResponse, error) CancelJob(context.Context, *CancelledJob) (*Empty, error) CompleteJob(context.Context, *CompletedJob) (*Empty, error) } @@ -135,8 +103,8 @@ func (s *DRPCProvisionerDaemonUnimplementedServer) AcquireJob(context.Context, * return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } -func (s *DRPCProvisionerDaemonUnimplementedServer) UpdateJob(DRPCProvisionerDaemon_UpdateJobStream) error { - return drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +func (s *DRPCProvisionerDaemonUnimplementedServer) UpdateJob(context.Context, *UpdateJobRequest) (*UpdateJobResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), 
drpcerr.Unimplemented) } func (s *DRPCProvisionerDaemonUnimplementedServer) CancelJob(context.Context, *CancelledJob) (*Empty, error) { @@ -165,9 +133,10 @@ func (DRPCProvisionerDaemonDescription) Method(n int) (string, drpc.Encoding, dr case 1: return "/provisionerd.ProvisionerDaemon/UpdateJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { - return nil, srv.(DRPCProvisionerDaemonServer). + return srv.(DRPCProvisionerDaemonServer). UpdateJob( - &drpcProvisionerDaemon_UpdateJobStream{in1.(drpc.Stream)}, + ctx, + in1.(*UpdateJobRequest), ) }, DRPCProvisionerDaemonServer.UpdateJob, true case 2: @@ -215,33 +184,20 @@ func (x *drpcProvisionerDaemon_AcquireJobStream) SendAndClose(m *AcquiredJob) er type DRPCProvisionerDaemon_UpdateJobStream interface { drpc.Stream - SendAndClose(*Empty) error - Recv() (*JobUpdate, error) + SendAndClose(*UpdateJobResponse) error } type drpcProvisionerDaemon_UpdateJobStream struct { drpc.Stream } -func (x *drpcProvisionerDaemon_UpdateJobStream) SendAndClose(m *Empty) error { +func (x *drpcProvisionerDaemon_UpdateJobStream) SendAndClose(m *UpdateJobResponse) error { if err := x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { return err } return x.CloseSend() } -func (x *drpcProvisionerDaemon_UpdateJobStream) Recv() (*JobUpdate, error) { - m := new(JobUpdate) - if err := x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { - return nil, err - } - return m, nil -} - -func (x *drpcProvisionerDaemon_UpdateJobStream) RecvMsg(m *JobUpdate) error { - return x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) -} - type DRPCProvisionerDaemon_CancelJobStream interface { drpc.Stream SendAndClose(*Empty) error diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index f771ad9094cd1..4c438d7360f66 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -19,11 +19,24 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/coderd/parameter" + "github.com/coder/coder/database" "github.com/coder/coder/provisionerd/proto" sdkproto "github.com/coder/coder/provisionersdk/proto" "github.com/coder/retry" ) +const ( + missingParameterErrorText = "missing parameter" +) + +// IsMissingParameterError returns whether the error message provided +// is a missing parameter error. This can indicate to consumers that +// they should check parameters. +func IsMissingParameterError(err string) bool { + return strings.Contains(err, missingParameterErrorText) +} + // Dialer represents the function to create a daemon client connection. type Dialer func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) @@ -71,7 +84,6 @@ type provisionerDaemon struct { clientDialer Dialer client proto.DRPCProvisionerDaemonClient - updateStream proto.DRPCProvisionerDaemon_UpdateJobClient // Locked when closing the daemon. 
closeMutex sync.Mutex @@ -104,17 +116,6 @@ func (p *provisionerDaemon) connect(ctx context.Context) { p.opts.Logger.Warn(context.Background(), "failed to dial", slog.Error(err)) continue } - p.updateStream, err = p.client.UpdateJob(ctx) - if err != nil { - if errors.Is(err, context.Canceled) { - return - } - if p.isClosed() { - return - } - p.opts.Logger.Warn(context.Background(), "create update job stream", slog.Error(err)) - continue - } p.opts.Logger.Debug(context.Background(), "connected") break } @@ -131,11 +132,11 @@ func (p *provisionerDaemon) connect(ctx context.Context) { select { case <-p.closed: return - case <-p.updateStream.Context().Done(): + case <-p.client.DRPCConn().Closed(): // We use the update stream to detect when the connection // has been interrupted. This works well, because logs need // to buffer if a job is running in the background. - p.opts.Logger.Debug(context.Background(), "update stream ended", slog.Error(p.updateStream.Context().Err())) + p.opts.Logger.Debug(context.Background(), "client stream ended") p.connect(ctx) } }() @@ -150,7 +151,7 @@ func (p *provisionerDaemon) connect(ctx context.Context) { select { case <-p.closed: return - case <-p.updateStream.Context().Done(): + case <-p.client.DRPCConn().Closed(): return case <-ticker.C: p.acquireJob(ctx) @@ -219,7 +220,7 @@ func (p *provisionerDaemon) runJob(ctx context.Context, job *proto.AcquiredJob) case <-ctx.Done(): return case <-ticker.C: - err := p.updateStream.Send(&proto.JobUpdate{ + _, err := p.client.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.JobId, }) if err != nil { @@ -344,34 +345,67 @@ func (p *provisionerDaemon) runJob(ctx context.Context, job *proto.AcquiredJob) } func (p *provisionerDaemon) runProjectImport(ctx context.Context, provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob) { - var parameterSchemas []*sdkproto.ParameterSchema - var startResources []*sdkproto.Resource - var stopResources []*sdkproto.Resource - var err error + parameterSchemas, err := p.runProjectImportParse(ctx, provisioner, job) + if err != nil { + p.cancelActiveJobf("run parse: %s", err) + return + } - if !job.GetProjectImport().SkipParameterSchemas { - parameterSchemas, err = p.runProjectImportParse(ctx, provisioner, job) - if err != nil { - p.cancelActiveJobf("run parse: %s", err) + updateResponse, err := p.client.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.JobId, + ParameterSchemas: parameterSchemas, + }) + if err != nil { + p.cancelActiveJobf("update job: %s", err) + return + } + + valueByName := map[string]*sdkproto.ParameterValue{} + for _, parameterValue := range updateResponse.ParameterValues { + valueByName[parameterValue.Name] = parameterValue + } + for _, parameterSchema := range parameterSchemas { + if parameterSchema.Name == parameter.CoderWorkspaceTransition { + // Hardcode the workspace transition variable. We'll + // make it do stuff later! + continue + } + _, ok := valueByName[parameterSchema.Name] + if !ok { + p.cancelActiveJobf("%s: %s", missingParameterErrorText, parameterSchema.Name) return } } + // Checks if the schema has defined a workspace transition variable. + // If not, we don't need to check for resources provisioned in a stopped state. 
+ hasWorkspaceTransition := false + for _, parameterSchema := range parameterSchemas { + if parameterSchema.Name != parameter.CoderWorkspaceTransition { + continue + } + hasWorkspaceTransition = true + break + } - if !job.GetProjectImport().SkipResources { - startResources, err = p.runProjectImportProvision(ctx, provisioner, job, append(job.GetProjectImport().GetParameterValues(), &sdkproto.ParameterValue{ + startParameters := updateResponse.ParameterValues + if hasWorkspaceTransition { + startParameters = append(updateResponse.ParameterValues, &sdkproto.ParameterValue{ DestinationScheme: sdkproto.ParameterDestination_PROVISIONER_VARIABLE, - // TODO: Make this a constant higher-up in the stack. - Name: "coder_workspace_transition", - Value: "start", - })) - if err != nil { - p.cancelActiveJobf("project import provision for start: %s", err) - return - } - stopResources, err = p.runProjectImportProvision(ctx, provisioner, job, append(job.GetProjectImport().GetParameterValues(), &sdkproto.ParameterValue{ + Name: parameter.CoderWorkspaceTransition, + Value: string(database.WorkspaceTransitionStart), + }) + } + startResources, err := p.runProjectImportProvision(ctx, provisioner, job, startParameters) + if err != nil { + p.cancelActiveJobf("project import provision for start: %s", err) + return + } + stopResources := startResources + if hasWorkspaceTransition { + stopResources, err = p.runProjectImportProvision(ctx, provisioner, job, append(updateResponse.ParameterValues, &sdkproto.ParameterValue{ DestinationScheme: sdkproto.ParameterDestination_PROVISIONER_VARIABLE, Name: "coder_workspace_transition", - Value: "stop", + Value: string(database.WorkspaceTransitionStop), })) if err != nil { p.cancelActiveJobf("project import provision for start: %s", err) @@ -383,9 +417,8 @@ func (p *provisionerDaemon) runProjectImport(ctx context.Context, provisioner sd JobId: job.JobId, Type: &proto.CompletedJob_ProjectImport_{ ProjectImport: &proto.CompletedJob_ProjectImport{ - ParameterSchemas: parameterSchemas, - StartResources: startResources, - StopResources: stopResources, + StartResources: startResources, + StopResources: stopResources, }, }, }) @@ -416,7 +449,7 @@ func (p *provisionerDaemon) runProjectImportParse(ctx context.Context, provision slog.F("output", msgType.Log.Output), ) - err = p.updateStream.Send(&proto.JobUpdate{ + _, err = p.client.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.JobId, Logs: []*proto.Log{{ Source: proto.LogSource_PROVISIONER, @@ -466,7 +499,7 @@ func (p *provisionerDaemon) runProjectImportProvision(ctx context.Context, provi slog.F("output", msgType.Log.Output), ) - err = p.updateStream.Send(&proto.JobUpdate{ + _, err = p.client.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.JobId, Logs: []*proto.Log{{ Source: proto.LogSource_PROVISIONER, @@ -519,7 +552,7 @@ func (p *provisionerDaemon) runWorkspaceProvision(ctx context.Context, provision slog.F("workspace_history_id", job.GetWorkspaceProvision().WorkspaceHistoryId), ) - err = p.updateStream.Send(&proto.JobUpdate{ + _, err = p.client.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.JobId, Logs: []*proto.Log{{ Source: proto.LogSource_PROVISIONER, diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 99f9f254086d5..db7019a05f5dc 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -21,6 +21,7 @@ import ( "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/coderd/parameter" "github.com/coder/coder/provisionerd" 
"github.com/coder/coder/provisionerd/proto" "github.com/coder/coder/provisionersdk" @@ -34,9 +35,8 @@ func TestMain(m *testing.M) { func TestProvisionerd(t *testing.T) { t.Parallel() - noopUpdateJob := func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - <-stream.Context().Done() - return nil + noopUpdateJob := func(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + return &proto.UpdateJobResponse{}, nil } t.Run("InstantClose", func(t *testing.T) { @@ -170,14 +170,9 @@ func TestProvisionerd(t *testing.T) { }, }, nil }, - updateJob: func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - for { - _, err := stream.Recv() - if err != nil { - return err - } - close(completeChan) - } + updateJob: func(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + close(completeChan) + return &proto.UpdateJobResponse{}, nil }, cancelJob: func(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) { return &proto.Empty{}, nil @@ -201,6 +196,7 @@ func TestProvisionerd(t *testing.T) { didComplete atomic.Bool didLog atomic.Bool didAcquireJob atomic.Bool + didDryRun atomic.Bool ) completeChan := make(chan struct{}) closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { @@ -222,18 +218,11 @@ func TestProvisionerd(t *testing.T) { }, }, nil }, - updateJob: func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - for { - msg, err := stream.Recv() - if err != nil { - return err - } - if len(msg.Logs) == 0 { - continue - } - + updateJob: func(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + if len(update.Logs) != 0 { didLog.Store(true) } + return &proto.UpdateJobResponse{}, nil }, completeJob: func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) { didComplete.Store(true) @@ -260,7 +249,9 @@ func TestProvisionerd(t *testing.T) { err = stream.Send(&sdkproto.Parse_Response{ Type: &sdkproto.Parse_Response_Complete{ Complete: &sdkproto.Parse_Complete{ - ParameterSchemas: []*sdkproto.ParameterSchema{}, + ParameterSchemas: []*sdkproto.ParameterSchema{{ + Name: parameter.CoderWorkspaceTransition, + }}, }, }, }) @@ -268,6 +259,9 @@ func TestProvisionerd(t *testing.T) { return nil }, provision: func(request *sdkproto.Provision_Request, stream sdkproto.DRPCProvisioner_ProvisionStream) error { + if request.DryRun { + didDryRun.Store(true) + } err := stream.Send(&sdkproto.Provision_Response{ Type: &sdkproto.Provision_Response_Log{ Log: &sdkproto.Log{ @@ -293,6 +287,7 @@ func TestProvisionerd(t *testing.T) { <-completeChan require.True(t, didLog.Load()) require.True(t, didComplete.Load()) + require.True(t, didDryRun.Load()) require.NoError(t, closer.Close()) }) @@ -323,18 +318,11 @@ func TestProvisionerd(t *testing.T) { }, }, nil }, - updateJob: func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - for { - msg, err := stream.Recv() - if err != nil { - return err - } - if len(msg.Logs) == 0 { - continue - } - + updateJob: func(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + if len(update.Logs) != 0 { didLog.Store(true) } + return &proto.UpdateJobResponse{}, nil }, completeJob: func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) { didComplete.Store(true) @@ -463,7 +451,7 @@ func (p *provisionerTestServer) Provision(request *sdkproto.Provision_Request, s // passable functions for dynamic functionality. 
type provisionerDaemonTestServer struct { acquireJob func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) - updateJob func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error + updateJob func(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) cancelJob func(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) completeJob func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) } @@ -472,8 +460,8 @@ func (p *provisionerDaemonTestServer) AcquireJob(ctx context.Context, empty *pro return p.acquireJob(ctx, empty) } -func (p *provisionerDaemonTestServer) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { - return p.updateJob(stream) +func (p *provisionerDaemonTestServer) UpdateJob(ctx context.Context, update *proto.UpdateJobRequest) (*proto.UpdateJobResponse, error) { + return p.updateJob(ctx, update) } func (p *provisionerDaemonTestServer) CancelJob(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) { diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 3dcb781e06499..18f034e0c7fd4 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -274,7 +274,6 @@ type ParameterDestination struct { unknownFields protoimpl.UnknownFields Scheme ParameterDestination_Scheme `protobuf:"varint,1,opt,name=scheme,proto3,enum=provisioner.ParameterDestination_Scheme" json:"scheme,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } func (x *ParameterDestination) Reset() { @@ -316,13 +315,6 @@ func (x *ParameterDestination) GetScheme() ParameterDestination_Scheme { return ParameterDestination_ENVIRONMENT_VARIABLE } -func (x *ParameterDestination) GetValue() string { - if x != nil { - return x.Value - } - return "" -} - // ParameterValue represents the resolved source and destination of a parameter. 
type ParameterValue struct { state protoimpl.MessageState @@ -1099,133 +1091,132 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x12, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x41, 0x54, 0x41, 0x10, 0x00, 0x22, - 0xac, 0x01, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, + 0x96, 0x01, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x3c, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x45, 0x4e, - 0x56, 0x49, 0x52, 0x4f, 0x4e, 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, - 0x4c, 0x45, 0x10, 0x00, 0x12, 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, - 0x4e, 0x45, 0x52, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x22, 0x93, - 0x01, 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x12, 0x57, 0x0a, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x22, 0x8d, 0x05, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, - 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, - 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 
0x20, 0x01, - 0x28, 0x08, 0x52, 0x13, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, - 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x52, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, - 0x6c, 0x74, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, - 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x1a, 0x61, - 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x64, 0x65, - 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x18, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x44, 0x65, - 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x64, - 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x0e, 0x72, 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x5d, 0x0a, 0x16, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x08, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x76, 0x61, 0x6c, - 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, - 0x6d, 0x12, 0x32, 0x0a, 0x15, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x12, 0x31, 0x0a, 0x14, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, - 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, - 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x1f, 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, - 0x6d, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x6f, 0x6e, 0x65, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x48, - 0x43, 0x4c, 0x10, 0x01, 0x22, 0x4a, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2b, 0x0a, 0x05, 0x6c, - 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, - 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 
0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x22, 0xfc, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, - 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, - 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, + 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x22, 0x3c, 0x0a, 0x06, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x45, 0x4e, 0x56, 0x49, 0x52, 0x4f, 0x4e, 0x4d, + 0x45, 0x4e, 0x54, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x00, 0x12, 0x18, + 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x56, 0x41, + 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x22, 0x93, 0x01, 0x0a, 0x0e, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x57, 0x0a, 0x12, 0x64, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x65, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x8d, + 0x05, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, + 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x32, 0x0a, + 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x61, 0x6c, + 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x12, 0x52, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x64, 0x65, 0x73, + 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 
0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x1a, 0x73, - 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, - 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, - 0x12, 0x39, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, - 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x22, 0xfc, 0x02, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x1a, 0x9e, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, - 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, 0x10, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x64, 0x72, 0x79, - 0x5f, 0x72, 0x75, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x64, 0x72, 0x79, 0x52, - 0x75, 0x6e, 0x1a, 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, - 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x77, 0x0a, 0x08, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x3d, 0x0a, 0x08, 0x63, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, - 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, - 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, - 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 
	... (regenerated raw descriptor bytes elided: the serialized descriptor now
	encodes the updated ParameterSchema, Log, Resource, Parse, and Provision
	messages, the TypeSystem and LogLevel enums, the Provisioner service with
	its streaming Parse and Provision RPCs, and the go_package option pointing
	at github.com/coder/coder/provisionersdk/proto) ...
 }

 var (
diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto
index 546c081cd6882..0b999dffbc4fe 100644
--- a/provisionersdk/proto/provisioner.proto
+++ b/provisionersdk/proto/provisioner.proto
@@ -20,7 +20,6 @@ message ParameterDestination {
         PROVISIONER_VARIABLE = 1;
     }
     Scheme scheme = 1;
-    string value = 2;
 }

 // ParameterValue represents the resolved source and destination of a parameter.
diff --git a/templates/null/main.tf b/templates/null/main.tf
new file mode 100644
index 0000000000000..9bb3f2042e2a4
--- /dev/null
+++ b/templates/null/main.tf
@@ -0,0 +1,5 @@
+variable "bananas" {
+  description = "hello!"
+}
+
+resource "null_resource" "example" {}
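
For context only, a rough Go sketch of how the "bananas" variable in templates/null/main.tf could surface through the Parse RPC declared in provisioner.proto. The type and field names (Parse_Response, Parse_Response_Complete, Parse_Complete, ParameterSchema, ParameterSchema_HCL) are assumed from the usual protoc-gen-go naming of nested messages, oneofs, and enums; they are illustrative and not confirmed against the generated provisionersdk/proto package.

// Illustrative sketch, not part of the diff.
package main

import (
	"fmt"

	"github.com/coder/coder/provisionersdk/proto"
)

func main() {
	// A provisioner parsing templates/null/main.tf would report one schema
	// per template variable; the null template declares a single "bananas"
	// variable.
	response := &proto.Parse_Response{
		// The Parse.Response oneof carries either a Log or a Complete message.
		Type: &proto.Parse_Response_Complete{
			Complete: &proto.Parse_Complete{
				ParameterSchemas: []*proto.ParameterSchema{{
					Name:                 "bananas",
					RedisplayValue:       true,
					ValidationTypeSystem: proto.ParameterSchema_HCL,
				}},
			},
		},
	}
	fmt.Println(response.String())
}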