
Commit

Merge remote-tracking branch 'origin/main' into no-sync-root-check-for-artifact-paths
pietern committed Jan 13, 2025
2 parents 77e159f + 913e10a commit 7a633ec
Showing 66 changed files with 3,038 additions and 144 deletions.
1 change: 0 additions & 1 deletion acceptance/bundle/override/job_cluster_var/databricks.yml
@@ -20,7 +20,6 @@ targets:
       jobs:
         foo:
           job_clusters:
-            # This does not work because merging is done before resolution
             - job_cluster_key: "${var.mykey}"
               new_cluster:
                 node_type_id: i3.xlarge
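The deleted comment marked this override as broken: a job cluster keyed by a variable was not merged with the base cluster definition, because merging ran before variable resolution. The output.txt change below shows the two entries now merging into a single cluster. For orientation, a minimal sketch of the kind of configuration this acceptance test exercises; the variable default, base cluster, and target name are assumptions reconstructed from the fragment above, not the full test file:

variables:
  mykey:
    default: key

resources:
  jobs:
    foo:
      job_clusters:
        # Base definition: only the Spark version is set here.
        - job_cluster_key: key
          new_cluster:
            spark_version: 13.3.x-scala2.12

targets:
  dev:
    resources:
      jobs:
        foo:
          job_clusters:
            # Keyed by a variable; after resolution this now merges with the
            # base entry above instead of remaining a separate cluster.
            - job_cluster_key: "${var.mykey}"
              new_cluster:
                node_type_id: i3.xlarge
                num_workers: 1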
18 changes: 4 additions & 14 deletions acceptance/bundle/override/job_cluster_var/output.txt
@@ -9,17 +9,12 @@
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
       "job_clusters": [
-        {
-          "job_cluster_key": "key",
-          "new_cluster": {
-            "spark_version": "13.3.x-scala2.12"
-          }
-        },
         {
           "job_cluster_key": "key",
           "new_cluster": {
             "node_type_id": "i3.xlarge",
-            "num_workers": 1
+            "num_workers": 1,
+            "spark_version": "13.3.x-scala2.12"
           }
         }
       ],
@@ -51,17 +46,12 @@ Validation OK!
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
       "job_clusters": [
-        {
-          "job_cluster_key": "key",
-          "new_cluster": {
-            "spark_version": "13.3.x-scala2.12"
-          }
-        },
         {
           "job_cluster_key": "key",
           "new_cluster": {
             "node_type_id": "i3.2xlarge",
-            "num_workers": 4
+            "num_workers": 4,
+            "spark_version": "13.3.x-scala2.12"
           }
         }
       ],
10 changes: 10 additions & 0 deletions acceptance/bundle/variables/host/databricks.yml
@@ -0,0 +1,10 @@
bundle:
name: host

variables:
host:
default: https://nonexistent123.staging.cloud.databricks.com

workspace:
# This is currently not supported
host: ${var.host}
38 changes: 38 additions & 0 deletions acceptance/bundle/variables/host/output.txt
@@ -0,0 +1,38 @@

>>> errcode $CLI bundle validate -o json
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

{
"bundle": {
"environment": "default",
"name": "host",
"target": "default"
},
"sync": {
"paths": [
"."
]
},
"targets": null,
"variables": {
"host": {
"default": "https://nonexistent123.staging.cloud.databricks.com"
}
},
"workspace": {
"host": "${var.host}"
}
}
Exit code: 1

>>> errcode $CLI bundle validate
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

Name: host
Target: default
Workspace:
Host: ${var.host}

Found 1 error

Exit code: 1
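The output above pins down current behavior: the reference in workspace.host is left unresolved, so the client fails to parse the literal "${var.host}" as a URL. For contrast, a minimal sketch of the form that does work today, with the host given literally (reusing the placeholder URL from the test):

bundle:
  name: host

workspace:
  # A literal value is accepted; only ${var.*} interpolation in this field
  # is currently unsupported.
  host: https://nonexistent123.staging.cloud.databricks.com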
2 changes: 2 additions & 0 deletions acceptance/bundle/variables/host/script
@@ -0,0 +1,2 @@
trace errcode $CLI bundle validate -o json
trace errcode $CLI bundle validate
50 changes: 50 additions & 0 deletions bundle/apps/interpolate_variables.go
@@ -0,0 +1,50 @@
package apps

import (
"context"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/dynvar"
)

type interpolateVariables struct{}

func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
pattern := dyn.NewPattern(
dyn.Key("resources"),
dyn.Key("apps"),
dyn.AnyKey(),
dyn.Key("config"),
)

tfToConfigMap := map[string]string{}
for k, r := range config.SupportedResources() {
tfToConfigMap[r.TerraformResourceName] = k
}

err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
key, ok := tfToConfigMap[path[0].Key()]
if ok {
path = dyn.NewPath(dyn.Key("resources"), dyn.Key(key)).Append(path[1:]...)
}

return dyn.GetByPath(root, path)
})
})
})

return diag.FromErr(err)
}

func (i *interpolateVariables) Name() string {
return "apps.InterpolateVariables"
}

func InterpolateVariables() bundle.Mutator {
return &interpolateVariables{}
}
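This mutator resolves references inside each app's config block, mapping terraform-style resource prefixes (for example databricks_job) back to their bundle config sections (resources.jobs) before lookup. A minimal sketch of bundle configuration it would act on; the resource names, command, and source_code_path value are hypothetical, mirroring the shape used in the test below:

resources:
  jobs:
    my_job:
      name: my_job

  apps:
    my_app:
      name: my_app
      source_code_path: ./my_app
      config:
        command: ["python", "app.py"]
        env:
          # Rewritten from databricks_job.my_job.id to resources.jobs.my_job.id
          # and resolved to the job's ID once it is known.
          - name: JOB_ID
            value: ${databricks_job.my_job.id}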
49 changes: 49 additions & 0 deletions bundle/apps/interpolate_variables_test.go
@@ -0,0 +1,49 @@
package apps

import (
"context"
"testing"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/require"
)

func TestAppInterpolateVariables(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Apps: map[string]*resources.App{
"my_app_1": {
App: &apps.App{
Name: "my_app_1",
},
Config: map[string]any{
"command": []string{"echo", "hello"},
"env": []map[string]string{
{"name": "JOB_ID", "value": "${databricks_job.my_job.id}"},
},
},
},
"my_app_2": {
App: &apps.App{
Name: "my_app_2",
},
},
},
Jobs: map[string]*resources.Job{
"my_job": {
ID: "123",
},
},
},
},
}

diags := bundle.Apply(context.Background(), b, InterpolateVariables())
require.Empty(t, diags)
require.Equal(t, []any([]any{map[string]any{"name": "JOB_ID", "value": "123"}}), b.Config.Resources.Apps["my_app_1"].Config["env"])
require.Nil(t, b.Config.Resources.Apps["my_app_2"].Config)
}
97 changes: 97 additions & 0 deletions bundle/apps/upload_config.go
@@ -0,0 +1,97 @@
package apps

import (
"bytes"
"context"
"fmt"
"path"
"strings"
"sync"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer"
"golang.org/x/sync/errgroup"

"gopkg.in/yaml.v3"
)

type uploadConfig struct {
filerFactory deploy.FilerFactory
}

func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
errGroup, ctx := errgroup.WithContext(ctx)

mu := sync.Mutex{}
for key, app := range b.Config.Resources.Apps {
		// If the app has a config, it needs to be deployed first: write an app.yml file
		// with the content of the config field to the app's remote source code path.
if app.Config != nil {
appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)

buf, err := configToYaml(app)
if err != nil {
return diag.FromErr(err)
}

f, err := u.filerFactory(b)
if err != nil {
return diag.FromErr(err)
}

errGroup.Go(func() error {
err := f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
if err != nil {
mu.Lock()
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "Failed to save config",
Detail: fmt.Sprintf("Failed to write %s file: %s", path.Join(app.SourceCodePath, "app.yml"), err),
Locations: b.Config.GetLocations("resources.apps." + key),
})
mu.Unlock()
}
return nil
})
}
}

if err := errGroup.Wait(); err != nil {
return diags.Extend(diag.FromErr(err))
}

return diags
}

// Name implements bundle.Mutator.
func (u *uploadConfig) Name() string {
return "apps:UploadConfig"
}

func UploadConfig() bundle.Mutator {
return &uploadConfig{
filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.FilePath)
},
}
}

func configToYaml(app *resources.App) (*bytes.Buffer, error) {
buf := bytes.NewBuffer(nil)
enc := yaml.NewEncoder(buf)
enc.SetIndent(2)

err := enc.Encode(app.Config)
defer enc.Close()

if err != nil {
return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
}

return buf, nil
}
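Assuming the interpolation mutator has already run, uploadConfig serializes each app's config field to an app.yml placed in the app's remote source code path. For the sketch above, the written file would look roughly like this (two-space indentation per the encoder setting; 123 stands in for the resolved job ID, as in the tests):

command:
  - python
  - app.py
env:
  - name: JOB_ID
    value: "123"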
75 changes: 75 additions & 0 deletions bundle/apps/upload_config_test.go
@@ -0,0 +1,75 @@
package apps

import (
"bytes"
"context"
"os"
"path/filepath"
"testing"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
mockfiler "github.com/databricks/cli/internal/mocks/libs/filer"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)

func TestAppUploadConfig(t *testing.T) {
root := t.TempDir()
err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
require.NoError(t, err)

b := &bundle.Bundle{
BundleRootPath: root,
SyncRootPath: root,
SyncRoot: vfs.MustNew(root),
Config: config.Root{
Workspace: config.Workspace{
RootPath: "/Workspace/Users/[email protected]/",
},
Resources: config.Resources{
Apps: map[string]*resources.App{
"my_app": {
App: &apps.App{
Name: "my_app",
},
SourceCodePath: "./my_app",
Config: map[string]any{
"command": []string{"echo", "hello"},
"env": []map[string]string{
{"name": "MY_APP", "value": "my value"},
},
},
},
},
},
},
}

mockFiler := mockfiler.NewMockFiler(t)
mockFiler.EXPECT().Write(mock.Anything, "my_app/app.yml", bytes.NewBufferString(`command:
- echo
- hello
env:
- name: MY_APP
value: my value
`), filer.OverwriteIfExists).Return(nil)

u := uploadConfig{
filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
return mockFiler, nil
},
}

bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})

diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u))
require.NoError(t, diags.Error())
}