diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 976b37f..5f559fe 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,57 +1,57 @@ -#------------------------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. -#------------------------------------------------------------------------------------------------------------- - -FROM golang:1.12.5 - -# Avoid warnings by switching to noninteractive -ENV DEBIAN_FRONTEND=noninteractive - -# Configure apt, install packages and tools -RUN apt-get update \ - && apt-get -y install --no-install-recommends apt-utils 2>&1 \ - # Verify git, process tools, lsb-release (common in install instructions for CLIs) installed - && apt-get -y install git procps lsb-release \ - # Install Editor - && apt-get install vim -y \ - # Install gocode-gomod - && go get -x -d github.com/stamblerre/gocode 2>&1 \ - && go build -o gocode-gomod github.com/stamblerre/gocode \ - && mv gocode-gomod $GOPATH/bin/ \ - # Clean up - && apt-get autoremove -y \ - && apt-get clean -y \ - && rm -rf /var/lib/apt/lists/* - -# Enable go modules -ENV GO111MODULE=on - -# Install Go tools -RUN \ - # --> Go language server - go get golang.org/x/tools/gopls@v0.3.3 \ - # --> GolangCI-lint - && curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sed 's/tar -/tar --no-same-owner -/g' | sh -s -- -b $(go env GOPATH)/bin \ - # --> Delve for debugging - && go get github.com/go-delve/delve/cmd/dlv@v1.4.0 \ - # --> Go-outline for extracting a JSON representation of the declarations in a Go source file - && go get -v github.com/ramya-rao-a/go-outline \ - && rm -rf /go/src/ && rm -rf /go/pkg - -RUN apt-get update \ - # Install Docker CE CLI - && apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common lsb-release \ - && curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \ - && add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" \ - && apt-get update \ - && apt-get install -y docker-ce-cli - -# Verify git, process tools installed -RUN apt-get -y install git procps wget nano zsh inotify-tools jq -RUN wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | zsh || true - -COPY ./Makefile ./ - -RUN mkdir -p /go/src/github.com/xinsnake/databricks-sdk-golang +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+#------------------------------------------------------------------------------------------------------------- + +FROM golang:1.12.5 + +# Avoid warnings by switching to noninteractive +ENV DEBIAN_FRONTEND=noninteractive + +# Configure apt, install packages and tools +RUN apt-get update \ + && apt-get -y install --no-install-recommends apt-utils 2>&1 \ + # Verify git, process tools, lsb-release (common in install instructions for CLIs) installed + && apt-get -y install git procps lsb-release \ + # Install Editor + && apt-get install vim -y \ + # Install gocode-gomod + && go get -x -d github.com/stamblerre/gocode 2>&1 \ + && go build -o gocode-gomod github.com/stamblerre/gocode \ + && mv gocode-gomod $GOPATH/bin/ \ + # Clean up + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +# Enable go modules +ENV GO111MODULE=on + +# Install Go tools +RUN \ + # --> Go language server + go get golang.org/x/tools/gopls@v0.3.3 \ + # --> GolangCI-lint + && curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sed 's/tar -/tar --no-same-owner -/g' | sh -s -- -b $(go env GOPATH)/bin \ + # --> Delve for debugging + && go get github.com/go-delve/delve/cmd/dlv@v1.4.0 \ + # --> Go-outline for extracting a JSON representation of the declarations in a Go source file + && go get -v github.com/ramya-rao-a/go-outline \ + && rm -rf /go/src/ && rm -rf /go/pkg + +RUN apt-get update \ + # Install Docker CE CLI + && apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common lsb-release \ + && curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \ + && add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" \ + && apt-get update \ + && apt-get install -y docker-ce-cli + +# Verify git, process tools installed +RUN apt-get -y install git procps wget nano zsh inotify-tools jq +RUN wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | zsh || true + +COPY ./Makefile ./ + +RUN mkdir -p /go/src/github.com/xinsnake/databricks-sdk-golang ENV SHELL /bin/bash \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 483c593..7982193 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,48 +1,48 @@ -// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml. -{ - "name": "Go", - "dockerComposeFile": "docker-compose.yml", - "service": "docker-in-docker", - "workspaceFolder": "/go/src/github.com/xinsnake/databricks-sdk-golang", - "postCreateCommand": "", - "shutdownAction": "stopCompose", - "extensions": [ - "ms-azuretools.vscode-docker", - "ms-vscode.go" - ], - "settings": { - "terminal.integrated.shell.linux": "zsh", - "go.gopath": "/go", - "go.useLanguageServer": true, - "[go]": { - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true, - }, - // Optional: Disable snippets, as they conflict with completion ranking. 
- "editor.snippetSuggestions": "none", - }, - "[go.mod]": { - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true, - }, - }, - "gopls": { - "usePlaceholders": true, // add parameter placeholders when completing a function - // Experimental settings - "completeUnimported": true, // autocomplete unimported packages - "deepCompletion": true // enable deep completion - }, - "go.toolsEnvVars": { - "GO111MODULE": "on" - }, - "go.lintTool": "golangci-lint", - "go.lintFlags": [ - "--fast" - ], - "remote.extensionKind": { - "ms-azuretools.vscode-docker": "workspace" - } - } +// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml. +{ + "name": "Go", + "dockerComposeFile": "docker-compose.yml", + "service": "docker-in-docker", + "workspaceFolder": "/go/src/github.com/xinsnake/databricks-sdk-golang", + "postCreateCommand": "", + "shutdownAction": "stopCompose", + "extensions": [ + "ms-azuretools.vscode-docker", + "ms-vscode.go" + ], + "settings": { + "terminal.integrated.shell.linux": "zsh", + "go.gopath": "/go", + "go.useLanguageServer": true, + "[go]": { + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true, + }, + // Optional: Disable snippets, as they conflict with completion ranking. + "editor.snippetSuggestions": "none", + }, + "[go.mod]": { + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": true, + }, + }, + "gopls": { + "usePlaceholders": true, // add parameter placeholders when completing a function + // Experimental settings + "completeUnimported": true, // autocomplete unimported packages + "deepCompletion": true // enable deep completion + }, + "go.toolsEnvVars": { + "GO111MODULE": "on" + }, + "go.lintTool": "golangci-lint", + "go.lintFlags": [ + "--fast" + ], + "remote.extensionKind": { + "ms-azuretools.vscode-docker": "workspace" + } + } } \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 39f1b93..03b903c 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,21 +1,21 @@ -version: '3' -services: - docker-in-docker: - build: - context: ../ - dockerfile: .devcontainer/Dockerfile - network_mode: "host" - volumes: - # Update this to wherever you want VS Code to mount the folder of your project - - ..:/go/src/github.com/xinsnake/databricks-sdk-golang - - # This lets you avoid setting up Git again in the container - - ~/.gitconfig:/root/.gitconfig - - ~/.ssh:/root/.ssh:ro # does not work on Windows! Will need to generate in container :( - # Forwarding the socket is optional, but lets docker work inside the container if you install the Docker CLI. - # See the docker-in-docker-compose definition for details on how to install it. - - /var/run/docker.sock:/var/run/docker.sock - - # Overrides default command so things don't shut down after the process ends - useful for debugging - command: sleep infinity - +version: '3' +services: + docker-in-docker: + build: + context: ../ + dockerfile: .devcontainer/Dockerfile + network_mode: "host" + volumes: + # Update this to wherever you want VS Code to mount the folder of your project + - ..:/go/src/github.com/xinsnake/databricks-sdk-golang + + # This lets you avoid setting up Git again in the container + - ~/.gitconfig:/root/.gitconfig + - ~/.ssh:/root/.ssh:ro # does not work on Windows! 
Will need to generate in container :( + # Forwarding the socket is optional, but lets docker work inside the container if you install the Docker CLI. + # See the docker-in-docker-compose definition for details on how to install it. + - /var/run/docker.sock:/var/run/docker.sock + + # Overrides default command so things don't shut down after the process ends - useful for debugging + command: sleep infinity + diff --git a/.vscode/settings.json b/.vscode/settings.json index a460645..e389e50 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,3 @@ -{ - "go.inferGopath": false +{ + "go.inferGopath": false } \ No newline at end of file diff --git a/LICENSE b/LICENSE index 7543d8b..8f835c2 100644 --- a/LICENSE +++ b/LICENSE @@ -1,13 +1,13 @@ -Copyright 2019 Xinyun Zhou - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and +Copyright 2019 Xinyun Zhou + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and limitations under the License. \ No newline at end of file diff --git a/Makefile b/Makefile index 63bc2a1..7206fc8 100644 --- a/Makefile +++ b/Makefile @@ -1,14 +1,14 @@ -all : checks test - -checks: - go build all - golangci-lint run - -test: checks - go test ./... - -fmt: - find . -name '*.go' | grep -v vendor | xargs gofmt -s -w - -deepcopy: - ./cmd/deepcopy-gen -i ./,./aws/...,./azure/... -h ./hack/boilerplate.go.txt -v 3 +all : checks test + +checks: + go build all + golangci-lint run + +test: checks + go test ./... + +fmt: + find . -name '*.go' | grep -v vendor | xargs gofmt -s -w + +deepcopy: + ./cmd/deepcopy-gen -i ./,./aws/...,./azure/... -h ./hack/boilerplate.go.txt -v 3 diff --git a/README.md b/README.md index 56546b0..686ed14 100644 --- a/README.md +++ b/README.md @@ -1,47 +1,47 @@ -# databricks-sdk-golang - -This is a Golang SDK for [DataBricks REST API 2.0](https://docs.databricks.com/api/latest/index.html#) and [Azure DataBricks REST API 2.0](https://docs.azuredatabricks.net/api/latest/index.html). - -**WARNING: The SDK is unstable and under development. More testing needed!** - -## Usage - -```go -import ( - databricks "github.com/xinsnake/databricks-sdk-golang" - dbAzure "github.com/xinsnake/databricks-sdk-golang/azure" - // dbAws "github.com/xinsnake/databricks-sdk-golang/aws" -) - -var o databricks.DBClientOption -o.Host = os.Getenv("DATABRICKS_HOST") -o.Token = os.Getenv("DATABRICKS_TOKEN") - -var c dbAzure.DBClient -c.Init(o) - -jobs, err := c.Jobs().List() -``` - -## Implementation Progress - -Everything except SCIM API are implemented. 
Please refer to the progress below: - -| API | AWS | Azure | -| :--- | :---: | :---: | -| Clusters API | ✔ | ✔ | -| DBFS API | ✔ | ✔ | -| Groups API | ✔ | ✔ | -| Instance Pools API (preview) | ✗ | ✗ | -| Instance Profiles API | ✔ | N/A | -| Jobs API | ✔ | ✔ | -| Libraries API | ✔ | ✔ | -| MLflow API | ✗ | ✗ | -| SCIM API (preview) | ✗ | ✗ | -| Secrets API | ✔ | ✔ | -| Token API | ✔ | ✔ | -| Workspace API | ✔ | ✔ | - -## Notes - +# databricks-sdk-golang + +This is a Golang SDK for [DataBricks REST API 2.0](https://docs.databricks.com/api/latest/index.html#) and [Azure DataBricks REST API 2.0](https://docs.azuredatabricks.net/api/latest/index.html). + +**WARNING: The SDK is unstable and under development. More testing needed!** + +## Usage + +```go +import ( + databricks "github.com/xinsnake/databricks-sdk-golang" + dbAzure "github.com/xinsnake/databricks-sdk-golang/azure" + // dbAws "github.com/xinsnake/databricks-sdk-golang/aws" +) + +var o databricks.DBClientOption +o.Host = os.Getenv("DATABRICKS_HOST") +o.Token = os.Getenv("DATABRICKS_TOKEN") + +var c dbAzure.DBClient +c.Init(o) + +jobs, err := c.Jobs().List() +``` + +## Implementation Progress + +Everything except SCIM API are implemented. Please refer to the progress below: + +| API | AWS | Azure | +| :--- | :---: | :---: | +| Clusters API | ✔ | ✔ | +| DBFS API | ✔ | ✔ | +| Groups API | ✔ | ✔ | +| Instance Pools API (preview) | ✗ | ✗ | +| Instance Profiles API | ✔ | N/A | +| Jobs API | ✔ | ✔ | +| Libraries API | ✔ | ✔ | +| MLflow API | ✗ | ✗ | +| SCIM API (preview) | ✗ | ✗ | +| Secrets API | ✔ | ✔ | +| Token API | ✔ | ✔ | +| Workspace API | ✔ | ✔ | + +## Notes + - [Deepcopy](https://godoc.org/k8s.io/gengo/examples/deepcopy-gen) is generated shall you need it. \ No newline at end of file diff --git a/aws/client.go b/aws/client.go index 777fc52..d22da8f 100644 --- a/aws/client.go +++ b/aws/client.go @@ -1,79 +1,79 @@ -package aws - -import databricks "github.com/xinsnake/databricks-sdk-golang" - -// DBClient is the client for Azure implements DBClient -type DBClient struct { - Option databricks.DBClientOption -} - -// Init initializes the client -func (c *DBClient) Init(option databricks.DBClientOption) DBClient { - c.Option = option - option.Init() - return *c -} - -// Clusters returns an instance of ClustersAPI -func (c DBClient) Clusters() ClustersAPI { - var clustersAPI ClustersAPI - return clustersAPI.init(c) -} - -// Dbfs returns an instance of DbfsAPI -func (c DBClient) Dbfs() DbfsAPI { - var dbfsAPI DbfsAPI - return dbfsAPI.init(c) -} - -// Groups returns an instance of GroupAPI -func (c DBClient) Groups() GroupsAPI { - var groupsAPI GroupsAPI - return groupsAPI.init(c) -} - -// InstanceProfiles returns an instance of GroupAPI -func (c DBClient) InstanceProfiles() InstanceProfilesAPI { - var instanceProfilesAPI InstanceProfilesAPI - return instanceProfilesAPI.init(c) -} - -// Jobs returns an instance of JobsAPI -func (c DBClient) Jobs() JobsAPI { - var jobsAPI JobsAPI - return jobsAPI.init(c) -} - -// Libraries returns an instance of LibrariesAPI -func (c DBClient) Libraries() LibrariesAPI { - var libraries LibrariesAPI - return libraries.init(c) -} - -// Scim returns an instance of ScimAPI -func (c DBClient) Scim() ScimAPI { - var scimAPI ScimAPI - return scimAPI.init(c) -} - -// Secrets returns an instance of SecretsAPI -func (c DBClient) Secrets() SecretsAPI { - var secretsAPI SecretsAPI - return secretsAPI.init(c) -} - -// Token returns an instance of TokensAPI -func (c DBClient) Token() TokenAPI { - var tokenAPI TokenAPI - 
return tokenAPI.init(c) -} - -// Workspace returns an instance of WorkspaceAPI -func (c DBClient) Workspace() WorkspaceAPI { - var workspaceAPI WorkspaceAPI - return workspaceAPI.init(c) -} - -func (c *DBClient) performQuery(method, path string, data interface{}, headers map[string]string) ([]byte, error) { - return databricks.PerformQuery(c.Option, method, path, data, headers) -} +package aws + +import databricks "github.com/xinsnake/databricks-sdk-golang" + +// DBClient is the client for Azure implements DBClient +type DBClient struct { + Option databricks.DBClientOption +} + +// Init initializes the client +func (c *DBClient) Init(option databricks.DBClientOption) DBClient { + c.Option = option + option.Init() + return *c +} + +// Clusters returns an instance of ClustersAPI +func (c DBClient) Clusters() ClustersAPI { + var clustersAPI ClustersAPI + return clustersAPI.init(c) +} + +// Dbfs returns an instance of DbfsAPI +func (c DBClient) Dbfs() DbfsAPI { + var dbfsAPI DbfsAPI + return dbfsAPI.init(c) +} + +// Groups returns an instance of GroupAPI +func (c DBClient) Groups() GroupsAPI { + var groupsAPI GroupsAPI + return groupsAPI.init(c) +} + +// InstanceProfiles returns an instance of GroupAPI +func (c DBClient) InstanceProfiles() InstanceProfilesAPI { + var instanceProfilesAPI InstanceProfilesAPI + return instanceProfilesAPI.init(c) +} + +// Jobs returns an instance of JobsAPI +func (c DBClient) Jobs() JobsAPI { + var jobsAPI JobsAPI + return jobsAPI.init(c) +} + +// Libraries returns an instance of LibrariesAPI +func (c DBClient) Libraries() LibrariesAPI { + var libraries LibrariesAPI + return libraries.init(c) +} + +// Scim returns an instance of ScimAPI +func (c DBClient) Scim() ScimAPI { + var scimAPI ScimAPI + return scimAPI.init(c) +} + +// Secrets returns an instance of SecretsAPI +func (c DBClient) Secrets() SecretsAPI { + var secretsAPI SecretsAPI + return secretsAPI.init(c) +} + +// Token returns an instance of TokensAPI +func (c DBClient) Token() TokenAPI { + var tokenAPI TokenAPI + return tokenAPI.init(c) +} + +// Workspace returns an instance of WorkspaceAPI +func (c DBClient) Workspace() WorkspaceAPI { + var workspaceAPI WorkspaceAPI + return workspaceAPI.init(c) +} + +func (c *DBClient) performQuery(method, path string, data interface{}, headers map[string]string) ([]byte, error) { + return databricks.PerformQuery(c.Option, method, path, data, headers) +} diff --git a/aws/clusters.go b/aws/clusters.go index f1a9d5c..3170f82 100644 --- a/aws/clusters.go +++ b/aws/clusters.go @@ -1,250 +1,250 @@ -package aws - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// ClustersAPI exposes the Clusters API -type ClustersAPI struct { - Client DBClient -} - -func (a ClustersAPI) init(client DBClient) ClustersAPI { - a.Client = client - return a -} - -// Create creates a new Spark cluster -func (a ClustersAPI) Create(cluster models.NewCluster) (models.ClusterInfo, error) { - var clusterInfo models.ClusterInfo - - resp, err := a.Client.performQuery(http.MethodPost, "/clusters/create", cluster, nil) - if err != nil { - return clusterInfo, err - } - - err = json.Unmarshal(resp, &clusterInfo) - return clusterInfo, err -} - -// Edit edits the configuration of a cluster to match the provided attributes and size -func (a ClustersAPI) Edit(clusterInfo models.ClusterInfo) error { - _, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", clusterInfo, nil) - return err -} - -// Start starts a terminated Spark cluster given its 
ID -func (a ClustersAPI) Start(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/start", data, nil) - return err -} - -// Restart restart a Spark cluster given its ID. If the cluster is not in a RUNNING state, nothing will happen. -func (a ClustersAPI) Restart(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", data, nil) - return err -} - -// Resize resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a RUNNING state. -func (a ClustersAPI) Resize(clusterID string, clusterSize models.ClusterSize) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - models.ClusterSize - }{ - clusterID, - clusterSize, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/resize", data, nil) - return err -} - -// Terminate terminates a Spark cluster given its ID -func (a ClustersAPI) Terminate(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/delete", data, nil) - return err -} - -// Delete is an alias of Terminate -func (a ClustersAPI) Delete(clusterID string) error { - return a.Terminate(clusterID) -} - -// PermanentDelete permanently delete a cluster -func (a ClustersAPI) PermanentDelete(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/permanent-delete", data, nil) - return err -} - -// Get retrieves the information for a cluster given its identifier -func (a ClustersAPI) Get(clusterID string) (models.ClusterInfo, error) { - var clusterInfo models.ClusterInfo - - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/clusters/get-delete", data, nil) - - if err != nil { - return clusterInfo, err - } - - err = json.Unmarshal(resp, &clusterInfo) - return clusterInfo, err -} - -// Pin ensure that an interactive cluster configuration is retained even after a cluster has been terminated for more than 30 days -func (a ClustersAPI) Pin(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/pin", data, nil) - return err -} - -// Unpin allows the cluster to eventually be removed from the list returned by the List API -func (a ClustersAPI) Unpin(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/unpin", data, nil) - return err -} - -// List return information about all pinned clusters, currently active clusters, -// up to 70 of the most recently terminated interactive clusters in the past 30 days, -// and up to 30 of the most recently terminated job clusters in the past 30 days -func (a ClustersAPI) List() ([]models.ClusterInfo, error) { - var clusterList = struct { - Clusters 
[]models.ClusterInfo `json:"clusters,omitempty" url:"clusters,omitempty"` - }{} - - resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list", nil, nil) - if err != nil { - return clusterList.Clusters, err - } - - err = json.Unmarshal(resp, &clusterList) - return clusterList.Clusters, err -} - -// ListNodeTypes returns a list of supported Spark node types -func (a ClustersAPI) ListNodeTypes() ([]models.NodeType, error) { - var nodeTypeList = struct { - NodeTypes []models.NodeType `json:"node_types,omitempty" url:"node_types,omitempty"` - }{} - - resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-node-types", nil, nil) - if err != nil { - return nodeTypeList.NodeTypes, err - } - - err = json.Unmarshal(resp, &nodeTypeList) - return nodeTypeList.NodeTypes, err -} - -// SparkVersions return the list of available Spark versions -func (a ClustersAPI) SparkVersions() ([]models.SparkVersion, error) { - var versionsList = struct { - Versions []models.SparkVersion `json:"versions,omitempty" url:"versions,omitempty"` - }{} - - resp, err := a.Client.performQuery(http.MethodGet, "/clusters/spark-versions", nil, nil) - if err != nil { - return versionsList.Versions, err - } - - err = json.Unmarshal(resp, &versionsList) - return versionsList.Versions, err -} - -// ClustersListZonesResponse is the response from ListZones -type ClustersListZonesResponse struct { - Zones []string `json:"zones,omitempty" url:"zones,omitempty"` - DefaultZone string `json:"default_zone,omitempty" url:"default_zone,omitempty"` -} - -// ListZones returns a list of availability zones where clusters can be created in (ex: us-west-2a) -func (a ClustersAPI) ListZones() (ClustersListZonesResponse, error) { - var zonesList ClustersListZonesResponse - - resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-zones", nil, nil) - if err != nil { - return zonesList, err - } - - err = json.Unmarshal(resp, &zonesList) - return zonesList, err -} - -// ClustersEventsResponse is the response from Events -type ClustersEventsResponse struct { - Events []models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"` - NextPage struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` - Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` - } `json:"next_page,omitempty" url:"next_page,omitempty"` - TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"` -} - -// Events retrieves a list of events about the activity of a cluster -func (a ClustersAPI) Events( - clusterID string, startTime, endTime int64, order models.ListOrder, - eventTypes []models.ClusterEventType, offset, limit int64) (ClustersEventsResponse, error) { - - var eventsResponse ClustersEventsResponse - - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` - Order models.ListOrder `json:"order,omitempty" url:"order,omitempty"` - EventTypes []models.ClusterEventType `json:"event_types,omitempty" url:"event_types,omitempty"` - Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` - Limit int64 `json:"limit,omitempty" url:"limit,omitempty"` - }{ - clusterID, - startTime, - endTime, - order, - eventTypes, - offset, - limit, - } - resp, err := a.Client.performQuery(http.MethodPost, "/clusters/events", data, nil) - if err != nil { - 
return eventsResponse, err - } - - err = json.Unmarshal(resp, &eventsResponse) - return eventsResponse, err -} +package aws + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// ClustersAPI exposes the Clusters API +type ClustersAPI struct { + Client DBClient +} + +func (a ClustersAPI) init(client DBClient) ClustersAPI { + a.Client = client + return a +} + +// Create creates a new Spark cluster +func (a ClustersAPI) Create(cluster models.NewCluster) (models.ClusterInfo, error) { + var clusterInfo models.ClusterInfo + + resp, err := a.Client.performQuery(http.MethodPost, "/clusters/create", cluster, nil) + if err != nil { + return clusterInfo, err + } + + err = json.Unmarshal(resp, &clusterInfo) + return clusterInfo, err +} + +// Edit edits the configuration of a cluster to match the provided attributes and size +func (a ClustersAPI) Edit(clusterInfo models.ClusterInfo) error { + _, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", clusterInfo, nil) + return err +} + +// Start starts a terminated Spark cluster given its ID +func (a ClustersAPI) Start(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/start", data, nil) + return err +} + +// Restart restart a Spark cluster given its ID. If the cluster is not in a RUNNING state, nothing will happen. +func (a ClustersAPI) Restart(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", data, nil) + return err +} + +// Resize resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a RUNNING state. 
+func (a ClustersAPI) Resize(clusterID string, clusterSize models.ClusterSize) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + models.ClusterSize + }{ + clusterID, + clusterSize, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/resize", data, nil) + return err +} + +// Terminate terminates a Spark cluster given its ID +func (a ClustersAPI) Terminate(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/delete", data, nil) + return err +} + +// Delete is an alias of Terminate +func (a ClustersAPI) Delete(clusterID string) error { + return a.Terminate(clusterID) +} + +// PermanentDelete permanently delete a cluster +func (a ClustersAPI) PermanentDelete(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/permanent-delete", data, nil) + return err +} + +// Get retrieves the information for a cluster given its identifier +func (a ClustersAPI) Get(clusterID string) (models.ClusterInfo, error) { + var clusterInfo models.ClusterInfo + + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + resp, err := a.Client.performQuery(http.MethodGet, "/clusters/get-delete", data, nil) + + if err != nil { + return clusterInfo, err + } + + err = json.Unmarshal(resp, &clusterInfo) + return clusterInfo, err +} + +// Pin ensure that an interactive cluster configuration is retained even after a cluster has been terminated for more than 30 days +func (a ClustersAPI) Pin(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/pin", data, nil) + return err +} + +// Unpin allows the cluster to eventually be removed from the list returned by the List API +func (a ClustersAPI) Unpin(clusterID string) error { + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + }{ + clusterID, + } + _, err := a.Client.performQuery(http.MethodPost, "/clusters/unpin", data, nil) + return err +} + +// List return information about all pinned clusters, currently active clusters, +// up to 70 of the most recently terminated interactive clusters in the past 30 days, +// and up to 30 of the most recently terminated job clusters in the past 30 days +func (a ClustersAPI) List() ([]models.ClusterInfo, error) { + var clusterList = struct { + Clusters []models.ClusterInfo `json:"clusters,omitempty" url:"clusters,omitempty"` + }{} + + resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list", nil, nil) + if err != nil { + return clusterList.Clusters, err + } + + err = json.Unmarshal(resp, &clusterList) + return clusterList.Clusters, err +} + +// ListNodeTypes returns a list of supported Spark node types +func (a ClustersAPI) ListNodeTypes() ([]models.NodeType, error) { + var nodeTypeList = struct { + NodeTypes []models.NodeType `json:"node_types,omitempty" url:"node_types,omitempty"` + }{} + + resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-node-types", nil, nil) + if err != nil { + return nodeTypeList.NodeTypes, err + } + + err = json.Unmarshal(resp, &nodeTypeList) + return nodeTypeList.NodeTypes, 
err +} + +// SparkVersions return the list of available Spark versions +func (a ClustersAPI) SparkVersions() ([]models.SparkVersion, error) { + var versionsList = struct { + Versions []models.SparkVersion `json:"versions,omitempty" url:"versions,omitempty"` + }{} + + resp, err := a.Client.performQuery(http.MethodGet, "/clusters/spark-versions", nil, nil) + if err != nil { + return versionsList.Versions, err + } + + err = json.Unmarshal(resp, &versionsList) + return versionsList.Versions, err +} + +// ClustersListZonesResponse is the response from ListZones +type ClustersListZonesResponse struct { + Zones []string `json:"zones,omitempty" url:"zones,omitempty"` + DefaultZone string `json:"default_zone,omitempty" url:"default_zone,omitempty"` +} + +// ListZones returns a list of availability zones where clusters can be created in (ex: us-west-2a) +func (a ClustersAPI) ListZones() (ClustersListZonesResponse, error) { + var zonesList ClustersListZonesResponse + + resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-zones", nil, nil) + if err != nil { + return zonesList, err + } + + err = json.Unmarshal(resp, &zonesList) + return zonesList, err +} + +// ClustersEventsResponse is the response from Events +type ClustersEventsResponse struct { + Events []models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"` + NextPage struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` + Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` + } `json:"next_page,omitempty" url:"next_page,omitempty"` + TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"` +} + +// Events retrieves a list of events about the activity of a cluster +func (a ClustersAPI) Events( + clusterID string, startTime, endTime int64, order models.ListOrder, + eventTypes []models.ClusterEventType, offset, limit int64) (ClustersEventsResponse, error) { + + var eventsResponse ClustersEventsResponse + + data := struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` + Order models.ListOrder `json:"order,omitempty" url:"order,omitempty"` + EventTypes []models.ClusterEventType `json:"event_types,omitempty" url:"event_types,omitempty"` + Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` + Limit int64 `json:"limit,omitempty" url:"limit,omitempty"` + }{ + clusterID, + startTime, + endTime, + order, + eventTypes, + offset, + limit, + } + resp, err := a.Client.performQuery(http.MethodPost, "/clusters/events", data, nil) + if err != nil { + return eventsResponse, err + } + + err = json.Unmarshal(resp, &eventsResponse) + return eventsResponse, err +} diff --git a/aws/dbfs.go b/aws/dbfs.go index 60c6739..21ed85c 100644 --- a/aws/dbfs.go +++ b/aws/dbfs.go @@ -1,204 +1,204 @@ -package aws - -import ( - "encoding/base64" - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// DbfsAPI exposes the DBFS API -type DbfsAPI struct { - Client DBClient -} - -func (a DbfsAPI) init(client DBClient) DbfsAPI { - a.Client = client - return a -} - -// AddBlock appends a block of data to the stream specified by the input handle -func (a DbfsAPI) AddBlock(handle int64, data []byte) error { - data2 := struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` - Data string 
`json:"data,omitempty" url:"data,omitempty"` - }{ - handle, - base64.StdEncoding.EncodeToString(data), - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/add-block", data2, nil) - return err -} - -// Close closes the stream specified by the input handle -func (a DbfsAPI) Close(handle int64) error { - data := struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` - }{ - handle, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", data, nil) - return err -} - -// DbfsCreateResponse is the response from Create -type DbfsCreateResponse struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` -} - -// Create opens a stream to write to a file and returns a handle to this stream -func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error) { - var createResponse DbfsCreateResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` - }{ - path, - overwrite, - } - resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil) - - if err != nil { - return createResponse, err - } - - err = json.Unmarshal(resp, &createResponse) - return createResponse, err -} - -// Delete deletes the file or directory (optionally recursively delete all files in the directory) -func (a DbfsAPI) Delete(path string, recursive bool) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` - }{ - path, - recursive, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", data, nil) - return err -} - -// GetStatus gets the file information of a file or directory -func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) { - var fileInfo models.FileInfo - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil) - - if err != nil { - return fileInfo, err - } - - err = json.Unmarshal(resp, &fileInfo) - - return fileInfo, err -} - -// DbfsListResponse is a list of FileInfo as a response of List -type DbfsListResponse struct { - Files []models.FileInfo `json:"files,omitempty" url:"files,omitempty"` -} - -// List lists the contents of a directory, or details of the file -func (a DbfsAPI) List(path string) ([]models.FileInfo, error) { - var listResponse DbfsListResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil) - - if err != nil { - return listResponse.Files, err - } - - err = json.Unmarshal(resp, &listResponse) - return listResponse.Files, err -} - -// Mkdirs creates the given directory and necessary parent directories if they do not exist -func (a DbfsAPI) Mkdirs(path string) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", data, nil) - return err -} - -// Move moves a file from one location to another location within DBFS -func (a DbfsAPI) Move(sourcePath, destinationPath string) error { - data := struct { - SourcePath string `json:"source_path,omitempty" url:"source_path,omitempty"` - DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"` - }{ - sourcePath, - destinationPath, - } - _, err := 
a.Client.performQuery(http.MethodPost, "/dbfs/move", data, nil) - return err -} - -// Put uploads a file through the use of multipart form post -func (a DbfsAPI) Put(path string, contents []byte, overwrite bool) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Contents string `json:"contents,omitempty" url:"contents,omitempty"` - Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` - }{ - path, - base64.StdEncoding.EncodeToString(contents), - overwrite, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", data, nil) - return err -} - -// DbfsReadResponse is the response of reading a file -type DbfsReadResponse struct { - BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` - Data []byte `json:"data,omitempty" url:"data,omitempty"` -} - -// Read returns the contents of a file -func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, error) { - var readResponseBase64 struct { - BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` - Data string `json:"data,omitempty" url:"data,omitempty"` - } - var readResponse DbfsReadResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` - Length int64 `json:"length,omitempty" url:"length,omitempty"` - }{ - path, - offset, - length, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil) - - if err != nil { - return readResponse, err - } - - err = json.Unmarshal(resp, &readResponseBase64) - if err != nil { - return readResponse, err - } - - readResponse.BytesRead = readResponseBase64.BytesRead - readResponse.Data, err = base64.StdEncoding.DecodeString(readResponseBase64.Data) - return readResponse, err -} +package aws + +import ( + "encoding/base64" + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// DbfsAPI exposes the DBFS API +type DbfsAPI struct { + Client DBClient +} + +func (a DbfsAPI) init(client DBClient) DbfsAPI { + a.Client = client + return a +} + +// AddBlock appends a block of data to the stream specified by the input handle +func (a DbfsAPI) AddBlock(handle int64, data []byte) error { + data2 := struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` + Data string `json:"data,omitempty" url:"data,omitempty"` + }{ + handle, + base64.StdEncoding.EncodeToString(data), + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/add-block", data2, nil) + return err +} + +// Close closes the stream specified by the input handle +func (a DbfsAPI) Close(handle int64) error { + data := struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` + }{ + handle, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", data, nil) + return err +} + +// DbfsCreateResponse is the response from Create +type DbfsCreateResponse struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` +} + +// Create opens a stream to write to a file and returns a handle to this stream +func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error) { + var createResponse DbfsCreateResponse + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` + }{ + path, + overwrite, + } + resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil) + + if err != nil { + return createResponse, err + } + + err 
= json.Unmarshal(resp, &createResponse) + return createResponse, err +} + +// Delete deletes the file or directory (optionally recursively delete all files in the directory) +func (a DbfsAPI) Delete(path string, recursive bool) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` + }{ + path, + recursive, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", data, nil) + return err +} + +// GetStatus gets the file information of a file or directory +func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) { + var fileInfo models.FileInfo + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil) + + if err != nil { + return fileInfo, err + } + + err = json.Unmarshal(resp, &fileInfo) + + return fileInfo, err +} + +// DbfsListResponse is a list of FileInfo as a response of List +type DbfsListResponse struct { + Files []models.FileInfo `json:"files,omitempty" url:"files,omitempty"` +} + +// List lists the contents of a directory, or details of the file +func (a DbfsAPI) List(path string) ([]models.FileInfo, error) { + var listResponse DbfsListResponse + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil) + + if err != nil { + return listResponse.Files, err + } + + err = json.Unmarshal(resp, &listResponse) + return listResponse.Files, err +} + +// Mkdirs creates the given directory and necessary parent directories if they do not exist +func (a DbfsAPI) Mkdirs(path string) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", data, nil) + return err +} + +// Move moves a file from one location to another location within DBFS +func (a DbfsAPI) Move(sourcePath, destinationPath string) error { + data := struct { + SourcePath string `json:"source_path,omitempty" url:"source_path,omitempty"` + DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"` + }{ + sourcePath, + destinationPath, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/move", data, nil) + return err +} + +// Put uploads a file through the use of multipart form post +func (a DbfsAPI) Put(path string, contents []byte, overwrite bool) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Contents string `json:"contents,omitempty" url:"contents,omitempty"` + Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` + }{ + path, + base64.StdEncoding.EncodeToString(contents), + overwrite, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", data, nil) + return err +} + +// DbfsReadResponse is the response of reading a file +type DbfsReadResponse struct { + BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` + Data []byte `json:"data,omitempty" url:"data,omitempty"` +} + +// Read returns the contents of a file +func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, error) { + var readResponseBase64 struct { + BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` + Data string `json:"data,omitempty" url:"data,omitempty"` + } + var readResponse DbfsReadResponse + + data := struct { + Path string 
`json:"path,omitempty" url:"path,omitempty"` + Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` + Length int64 `json:"length,omitempty" url:"length,omitempty"` + }{ + path, + offset, + length, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil) + + if err != nil { + return readResponse, err + } + + err = json.Unmarshal(resp, &readResponseBase64) + if err != nil { + return readResponse, err + } + + readResponse.BytesRead = readResponseBase64.BytesRead + readResponse.Data, err = base64.StdEncoding.DecodeString(readResponseBase64.Data) + return readResponse, err +} diff --git a/aws/deepcopy_generated.go b/aws/deepcopy_generated.go index 467f842..345d798 100644 --- a/aws/deepcopy_generated.go +++ b/aws/deepcopy_generated.go @@ -1,396 +1,396 @@ -// +build !ignore_autogenerated - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package aws - -import ( - models "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClustersAPI) DeepCopyInto(out *ClustersAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersAPI. -func (in *ClustersAPI) DeepCopy() *ClustersAPI { - if in == nil { - return nil - } - out := new(ClustersAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClustersEventsResponse) DeepCopyInto(out *ClustersEventsResponse) { - *out = *in - if in.Events != nil { - in, out := &in.Events, &out.Events - *out = make([]models.ClusterEvent, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - out.NextPage = in.NextPage - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersEventsResponse. -func (in *ClustersEventsResponse) DeepCopy() *ClustersEventsResponse { - if in == nil { - return nil - } - out := new(ClustersEventsResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClustersListZonesResponse) DeepCopyInto(out *ClustersListZonesResponse) { - *out = *in - if in.Zones != nil { - in, out := &in.Zones, &out.Zones - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersListZonesResponse. -func (in *ClustersListZonesResponse) DeepCopy() *ClustersListZonesResponse { - if in == nil { - return nil - } - out := new(ClustersListZonesResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DBClient) DeepCopyInto(out *DBClient) { - *out = *in - in.Option.DeepCopyInto(&out.Option) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DBClient. -func (in *DBClient) DeepCopy() *DBClient { - if in == nil { - return nil - } - out := new(DBClient) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DbfsAPI) DeepCopyInto(out *DbfsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsAPI. -func (in *DbfsAPI) DeepCopy() *DbfsAPI { - if in == nil { - return nil - } - out := new(DbfsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsCreateResponse) DeepCopyInto(out *DbfsCreateResponse) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsCreateResponse. -func (in *DbfsCreateResponse) DeepCopy() *DbfsCreateResponse { - if in == nil { - return nil - } - out := new(DbfsCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsListResponse) DeepCopyInto(out *DbfsListResponse) { - *out = *in - if in.Files != nil { - in, out := &in.Files, &out.Files - *out = make([]models.FileInfo, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsListResponse. -func (in *DbfsListResponse) DeepCopy() *DbfsListResponse { - if in == nil { - return nil - } - out := new(DbfsListResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsReadResponse) DeepCopyInto(out *DbfsReadResponse) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make([]byte, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsReadResponse. -func (in *DbfsReadResponse) DeepCopy() *DbfsReadResponse { - if in == nil { - return nil - } - out := new(DbfsReadResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GroupsAPI) DeepCopyInto(out *GroupsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsAPI. -func (in *GroupsAPI) DeepCopy() *GroupsAPI { - if in == nil { - return nil - } - out := new(GroupsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GroupsCreateResponse) DeepCopyInto(out *GroupsCreateResponse) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsCreateResponse. -func (in *GroupsCreateResponse) DeepCopy() *GroupsCreateResponse { - if in == nil { - return nil - } - out := new(GroupsCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstanceProfilesAPI) DeepCopyInto(out *InstanceProfilesAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceProfilesAPI. 
-func (in *InstanceProfilesAPI) DeepCopy() *InstanceProfilesAPI { - if in == nil { - return nil - } - out := new(InstanceProfilesAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobsAPI) DeepCopyInto(out *JobsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsAPI. -func (in *JobsAPI) DeepCopy() *JobsAPI { - if in == nil { - return nil - } - out := new(JobsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobsRunsGetOutputResponse) DeepCopyInto(out *JobsRunsGetOutputResponse) { - *out = *in - out.NotebookOutput = in.NotebookOutput - in.Metadata.DeepCopyInto(&out.Metadata) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsGetOutputResponse. -func (in *JobsRunsGetOutputResponse) DeepCopy() *JobsRunsGetOutputResponse { - if in == nil { - return nil - } - out := new(JobsRunsGetOutputResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobsRunsListResponse) DeepCopyInto(out *JobsRunsListResponse) { - *out = *in - if in.Runs != nil { - in, out := &in.Runs, &out.Runs - *out = make([]models.Run, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsListResponse. -func (in *JobsRunsListResponse) DeepCopy() *JobsRunsListResponse { - if in == nil { - return nil - } - out := new(JobsRunsListResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibrariesAPI) DeepCopyInto(out *LibrariesAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesAPI. -func (in *LibrariesAPI) DeepCopy() *LibrariesAPI { - if in == nil { - return nil - } - out := new(LibrariesAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibrariesClusterStatusResponse) DeepCopyInto(out *LibrariesClusterStatusResponse) { - *out = *in - if in.LibraryStatuses != nil { - in, out := &in.LibraryStatuses, &out.LibraryStatuses - *out = make([]models.LibraryFullStatus, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesClusterStatusResponse. -func (in *LibrariesClusterStatusResponse) DeepCopy() *LibrariesClusterStatusResponse { - if in == nil { - return nil - } - out := new(LibrariesClusterStatusResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScimAPI) DeepCopyInto(out *ScimAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScimAPI. 
-func (in *ScimAPI) DeepCopy() *ScimAPI { - if in == nil { - return nil - } - out := new(ScimAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SecretsAPI) DeepCopyInto(out *SecretsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretsAPI. -func (in *SecretsAPI) DeepCopy() *SecretsAPI { - if in == nil { - return nil - } - out := new(SecretsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TokenAPI) DeepCopyInto(out *TokenAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenAPI. -func (in *TokenAPI) DeepCopy() *TokenAPI { - if in == nil { - return nil - } - out := new(TokenAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TokenCreateResponse) DeepCopyInto(out *TokenCreateResponse) { - *out = *in - out.TokenInfo = in.TokenInfo - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenCreateResponse. -func (in *TokenCreateResponse) DeepCopy() *TokenCreateResponse { - if in == nil { - return nil - } - out := new(TokenCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkspaceAPI) DeepCopyInto(out *WorkspaceAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkspaceAPI. -func (in *WorkspaceAPI) DeepCopy() *WorkspaceAPI { - if in == nil { - return nil - } - out := new(WorkspaceAPI) - in.DeepCopyInto(out) - return out -} +// +build !ignore_autogenerated + +// Code generated by deepcopy-gen. DO NOT EDIT. + +package aws + +import ( + models "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClustersAPI) DeepCopyInto(out *ClustersAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersAPI. +func (in *ClustersAPI) DeepCopy() *ClustersAPI { + if in == nil { + return nil + } + out := new(ClustersAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClustersEventsResponse) DeepCopyInto(out *ClustersEventsResponse) { + *out = *in + if in.Events != nil { + in, out := &in.Events, &out.Events + *out = make([]models.ClusterEvent, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + out.NextPage = in.NextPage + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersEventsResponse. 
+func (in *ClustersEventsResponse) DeepCopy() *ClustersEventsResponse { + if in == nil { + return nil + } + out := new(ClustersEventsResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClustersListZonesResponse) DeepCopyInto(out *ClustersListZonesResponse) { + *out = *in + if in.Zones != nil { + in, out := &in.Zones, &out.Zones + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersListZonesResponse. +func (in *ClustersListZonesResponse) DeepCopy() *ClustersListZonesResponse { + if in == nil { + return nil + } + out := new(ClustersListZonesResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DBClient) DeepCopyInto(out *DBClient) { + *out = *in + in.Option.DeepCopyInto(&out.Option) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DBClient. +func (in *DBClient) DeepCopy() *DBClient { + if in == nil { + return nil + } + out := new(DBClient) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DbfsAPI) DeepCopyInto(out *DbfsAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsAPI. +func (in *DbfsAPI) DeepCopy() *DbfsAPI { + if in == nil { + return nil + } + out := new(DbfsAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DbfsCreateResponse) DeepCopyInto(out *DbfsCreateResponse) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsCreateResponse. +func (in *DbfsCreateResponse) DeepCopy() *DbfsCreateResponse { + if in == nil { + return nil + } + out := new(DbfsCreateResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DbfsListResponse) DeepCopyInto(out *DbfsListResponse) { + *out = *in + if in.Files != nil { + in, out := &in.Files, &out.Files + *out = make([]models.FileInfo, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsListResponse. +func (in *DbfsListResponse) DeepCopy() *DbfsListResponse { + if in == nil { + return nil + } + out := new(DbfsListResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DbfsReadResponse) DeepCopyInto(out *DbfsReadResponse) { + *out = *in + if in.Data != nil { + in, out := &in.Data, &out.Data + *out = make([]byte, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsReadResponse. 
+func (in *DbfsReadResponse) DeepCopy() *DbfsReadResponse { + if in == nil { + return nil + } + out := new(DbfsReadResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GroupsAPI) DeepCopyInto(out *GroupsAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsAPI. +func (in *GroupsAPI) DeepCopy() *GroupsAPI { + if in == nil { + return nil + } + out := new(GroupsAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GroupsCreateResponse) DeepCopyInto(out *GroupsCreateResponse) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsCreateResponse. +func (in *GroupsCreateResponse) DeepCopy() *GroupsCreateResponse { + if in == nil { + return nil + } + out := new(GroupsCreateResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InstanceProfilesAPI) DeepCopyInto(out *InstanceProfilesAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceProfilesAPI. +func (in *InstanceProfilesAPI) DeepCopy() *InstanceProfilesAPI { + if in == nil { + return nil + } + out := new(InstanceProfilesAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *JobsAPI) DeepCopyInto(out *JobsAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsAPI. +func (in *JobsAPI) DeepCopy() *JobsAPI { + if in == nil { + return nil + } + out := new(JobsAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *JobsRunsGetOutputResponse) DeepCopyInto(out *JobsRunsGetOutputResponse) { + *out = *in + out.NotebookOutput = in.NotebookOutput + in.Metadata.DeepCopyInto(&out.Metadata) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsGetOutputResponse. +func (in *JobsRunsGetOutputResponse) DeepCopy() *JobsRunsGetOutputResponse { + if in == nil { + return nil + } + out := new(JobsRunsGetOutputResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *JobsRunsListResponse) DeepCopyInto(out *JobsRunsListResponse) { + *out = *in + if in.Runs != nil { + in, out := &in.Runs, &out.Runs + *out = make([]models.Run, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsListResponse. +func (in *JobsRunsListResponse) DeepCopy() *JobsRunsListResponse { + if in == nil { + return nil + } + out := new(JobsRunsListResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func (in *LibrariesAPI) DeepCopyInto(out *LibrariesAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesAPI. +func (in *LibrariesAPI) DeepCopy() *LibrariesAPI { + if in == nil { + return nil + } + out := new(LibrariesAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LibrariesClusterStatusResponse) DeepCopyInto(out *LibrariesClusterStatusResponse) { + *out = *in + if in.LibraryStatuses != nil { + in, out := &in.LibraryStatuses, &out.LibraryStatuses + *out = make([]models.LibraryFullStatus, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesClusterStatusResponse. +func (in *LibrariesClusterStatusResponse) DeepCopy() *LibrariesClusterStatusResponse { + if in == nil { + return nil + } + out := new(LibrariesClusterStatusResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScimAPI) DeepCopyInto(out *ScimAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScimAPI. +func (in *ScimAPI) DeepCopy() *ScimAPI { + if in == nil { + return nil + } + out := new(ScimAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretsAPI) DeepCopyInto(out *SecretsAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretsAPI. +func (in *SecretsAPI) DeepCopy() *SecretsAPI { + if in == nil { + return nil + } + out := new(SecretsAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TokenAPI) DeepCopyInto(out *TokenAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenAPI. +func (in *TokenAPI) DeepCopy() *TokenAPI { + if in == nil { + return nil + } + out := new(TokenAPI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TokenCreateResponse) DeepCopyInto(out *TokenCreateResponse) { + *out = *in + out.TokenInfo = in.TokenInfo + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenCreateResponse. +func (in *TokenCreateResponse) DeepCopy() *TokenCreateResponse { + if in == nil { + return nil + } + out := new(TokenCreateResponse) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkspaceAPI) DeepCopyInto(out *WorkspaceAPI) { + *out = *in + in.Client.DeepCopyInto(&out.Client) + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkspaceAPI. 
+func (in *WorkspaceAPI) DeepCopy() *WorkspaceAPI { + if in == nil { + return nil + } + out := new(WorkspaceAPI) + in.DeepCopyInto(out) + return out +} diff --git a/aws/doc.go b/aws/doc.go index b3b79bf..78d906f 100644 --- a/aws/doc.go +++ b/aws/doc.go @@ -1,2 +1,2 @@ -// +k8s:deepcopy-gen=package -package aws +// +k8s:deepcopy-gen=package +package aws diff --git a/aws/groups.go b/aws/groups.go index b701c37..2069053 100644 --- a/aws/groups.go +++ b/aws/groups.go @@ -1,139 +1,139 @@ -package aws - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// GroupsAPI exposes the Groups API -type GroupsAPI struct { - Client DBClient -} - -func (a GroupsAPI) init(client DBClient) GroupsAPI { - a.Client = client - return a -} - -// AddMember adds a user or group to a group -func (a GroupsAPI) AddMember(principalName models.PrincipalName, parentName string) error { - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - parentName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", data, nil) - return err -} - -// GroupsCreateResponse is a response with group name for Create -type GroupsCreateResponse struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` -} - -// Create creates a new group with the given name -func (a GroupsAPI) Create(groupName string) (GroupsCreateResponse, error) { - var createResponse GroupsCreateResponse - - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - resp, err := a.Client.performQuery(http.MethodPost, "/groups/create", data, nil) - if err != nil { - return createResponse, err - } - - err = json.Unmarshal(resp, &createResponse) - return createResponse, err -} - -// ListMembers returns all of the members of a particular group -func (a GroupsAPI) ListMembers(groupName string) ([]models.PrincipalName, error) { - var membersResponse struct { - Members []models.PrincipalName `json:"members,omitempty" url:"members,omitempty"` - } - - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil) - if err != nil { - return membersResponse.Members, err - } - - err = json.Unmarshal(resp, &membersResponse) - return membersResponse.Members, err -} - -// List returns all of the groups in an organization -func (a GroupsAPI) List() ([]string, error) { - var listResponse struct { - GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil) - if err != nil { - return listResponse.GroupNames, err - } - - err = json.Unmarshal(resp, &listResponse) - return listResponse.GroupNames, err -} - -// ListParents retrieves all groups in which a given user or group is a member -func (a GroupsAPI) ListParents(principalName models.PrincipalName) ([]string, error) { - var listParentsResponse struct { - GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"` - } - - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" 
url:"group_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - } - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil) - if err != nil { - return listParentsResponse.GroupNames, err - } - - err = json.Unmarshal(resp, &listParentsResponse) - return listParentsResponse.GroupNames, err -} - -// RemoveMember removes a user or group from a group -func (a GroupsAPI) RemoveMember(principalName models.PrincipalName, parentName string) error { - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - parentName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", data, nil) - return err -} - -// Delete removes a group from this organization -func (a GroupsAPI) Delete(groupName string) error { - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/delete", data, nil) - return err -} +package aws + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// GroupsAPI exposes the Groups API +type GroupsAPI struct { + Client DBClient +} + +func (a GroupsAPI) init(client DBClient) GroupsAPI { + a.Client = client + return a +} + +// AddMember adds a user or group to a group +func (a GroupsAPI) AddMember(principalName models.PrincipalName, parentName string) error { + data := struct { + UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` + ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` + }{ + principalName.UserName, + principalName.GroupName, + parentName, + } + _, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", data, nil) + return err +} + +// GroupsCreateResponse is a response with group name for Create +type GroupsCreateResponse struct { + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` +} + +// Create creates a new group with the given name +func (a GroupsAPI) Create(groupName string) (GroupsCreateResponse, error) { + var createResponse GroupsCreateResponse + + data := struct { + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` + }{ + groupName, + } + resp, err := a.Client.performQuery(http.MethodPost, "/groups/create", data, nil) + if err != nil { + return createResponse, err + } + + err = json.Unmarshal(resp, &createResponse) + return createResponse, err +} + +// ListMembers returns all of the members of a particular group +func (a GroupsAPI) ListMembers(groupName string) ([]models.PrincipalName, error) { + var membersResponse struct { + Members []models.PrincipalName `json:"members,omitempty" url:"members,omitempty"` + } + + data := struct { + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` + }{ + groupName, + } + resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil) + if err != nil { + return membersResponse.Members, err + } + + err = json.Unmarshal(resp, &membersResponse) + return membersResponse.Members, err +} + +// List returns all of the groups in an organization +func (a GroupsAPI) List() ([]string, error) { + var listResponse struct { + 
GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
+ }
+
+ resp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil)
+ if err != nil {
+ return listResponse.GroupNames, err
+ }
+
+ err = json.Unmarshal(resp, &listResponse)
+ return listResponse.GroupNames, err
+}
+
+// ListParents retrieves all groups in which a given user or group is a member
+func (a GroupsAPI) ListParents(principalName models.PrincipalName) ([]string, error) {
+ var listParentsResponse struct {
+ GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
+ }
+
+ data := struct {
+ UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
+ GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+ }{
+ principalName.UserName,
+ principalName.GroupName,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-parents", data, nil)
+ if err != nil {
+ return listParentsResponse.GroupNames, err
+ }
+
+ err = json.Unmarshal(resp, &listParentsResponse)
+ return listParentsResponse.GroupNames, err
+}
+
+// RemoveMember removes a user or group from a group
+func (a GroupsAPI) RemoveMember(principalName models.PrincipalName, parentName string) error {
+ data := struct {
+ UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
+ GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+ ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
+ }{
+ principalName.UserName,
+ principalName.GroupName,
+ parentName,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", data, nil)
+ return err
+}
+
+// Delete removes a group from this organization
+func (a GroupsAPI) Delete(groupName string) error {
+ data := struct {
+ GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+ }{
+ groupName,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/groups/delete", data, nil)
+ return err
+}
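For orientation, the Groups wrappers above compose in the obvious way. A minimal usage sketch, assuming an already-initialized DBClient (its construction is outside this diff); the group and user names are made up for illustration:

```go
package aws // illustrative sketch; sits alongside the wrappers above

import (
	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)

// exampleGroups is a hypothetical helper showing how the calls compose.
// It assumes an already-initialized DBClient; the SDK may expose an
// accessor for GroupsAPI not shown in this diff, so the struct is built
// directly through the exported Client field.
func exampleGroups(client DBClient) ([]string, error) {
	groups := GroupsAPI{Client: client}

	if _, err := groups.Create("data-engineers"); err != nil { // hypothetical group
		return nil, err
	}
	member := models.PrincipalName{UserName: "jane@example.com"} // hypothetical user
	if err := groups.AddMember(member, "data-engineers"); err != nil {
		return nil, err
	}
	// The member's parent groups should now include "data-engineers".
	return groups.ListParents(member)
}
```

diff --git a/aws/instance_profiles.go b/aws/instance_profiles.go
index d23d987..6555ab4 100644
--- a/aws/instance_profiles.go
+++ b/aws/instance_profiles.go
@@ -1,57 +1,57 @@
-package aws
-
-import (
- "encoding/json"
- "net/http"
-
- "github.com/xinsnake/databricks-sdk-golang/aws/models"
-)
-
-// InstanceProfilesAPI exposes the Instance Profiles API
-type InstanceProfilesAPI struct {
- Client DBClient
-}
-
-func (a InstanceProfilesAPI) init(client DBClient) InstanceProfilesAPI {
- a.Client = client
- return a
-}
-
-// Add registers an instance profile in Databricks
-func (a InstanceProfilesAPI) Add(instanceProfileArn string, skipValidation bool) error {
- data := struct {
- InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"`
- SkipValidation bool `json:"skip_validation,omitempty" url:"skip_validation,omitempty"`
- }{
- instanceProfileArn,
- skipValidation,
- }
- _, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/add", data, nil)
- return err
-}
-
-// List lists the instance profiles that the calling user can use to launch a cluster
-func (a InstanceProfilesAPI) List() ([]models.InstanceProfile, error) {
- var listResponse struct {
- InstanceProfiles []models.InstanceProfile `json:"instance_profiles,omitempty" url:"instance_profiles,omitempty"`
- }
-
- resp, err := a.Client.performQuery(http.MethodGet, "/instance-profiles/list", nil, nil)
- if err != nil {
- return listResponse.InstanceProfiles, err
- }
-
- err = json.Unmarshal(resp, &listResponse)
- return 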
listResponse.InstanceProfiles, err -} - -// Remove removes the instance profile with the provided ARN -func (a InstanceProfilesAPI) Remove(instanceProfileArn string) error { - data := struct { - InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` - }{ - instanceProfileArn, - } - _, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/remove", data, nil) - return err -} +package aws + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// InstanceProfilesAPI exposes the Instance Profiles API +type InstanceProfilesAPI struct { + Client DBClient +} + +func (a InstanceProfilesAPI) init(client DBClient) InstanceProfilesAPI { + a.Client = client + return a +} + +// Add registers an instance profile in Databricks +func (a InstanceProfilesAPI) Add(instanceProfileArn string, skipValidation bool) error { + data := struct { + InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` + SkipValidation bool `json:"skip_validation,omitempty" url:"skip_validation,omitempty"` + }{ + instanceProfileArn, + skipValidation, + } + _, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/add", data, nil) + return err +} + +// List lists the instance profiles that the calling user can use to launch a cluster +func (a InstanceProfilesAPI) List() ([]models.InstanceProfile, error) { + var listResponse struct { + InstanceProfiles []models.InstanceProfile `json:"instance_profiles,omitempty" url:"instance_profiles,omitempty"` + } + + resp, err := a.Client.performQuery(http.MethodGet, "/instance-profiles/list", nil, nil) + if err != nil { + return listResponse.InstanceProfiles, err + } + + err = json.Unmarshal(resp, &listResponse) + return listResponse.InstanceProfiles, err +} + +// Remove removes the instance profile with the provided ARN +func (a InstanceProfilesAPI) Remove(instanceProfileArn string) error { + data := struct { + InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` + }{ + instanceProfileArn, + } + _, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/remove", data, nil) + return err +} diff --git a/aws/jobs.go b/aws/jobs.go index 20fa64b..fe3dcbf 100644 --- a/aws/jobs.go +++ b/aws/jobs.go @@ -1,249 +1,249 @@ -package aws - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// JobsAPI exposes Jobs API endpoints -type JobsAPI struct { - Client DBClient -} - -func (a JobsAPI) init(client DBClient) JobsAPI { - a.Client = client - return a -} - -// Create creates a new job -func (a JobsAPI) Create(jobSettings models.JobSettings) (models.Job, error) { - var job models.Job - - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", jobSettings, nil) - if err != nil { - return job, err - } - - err = json.Unmarshal(resp, &job) - return job, err -} - -// List lists all jobs -func (a JobsAPI) List() ([]models.Job, error) { - var jobsList = struct { - Jobs []models.Job `json:"jobs,omitempty" url:"jobs,omitempty"` - }{} - - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil) - if err != nil { - return jobsList.Jobs, err - } - - err = json.Unmarshal(resp, &jobsList) - return jobsList.Jobs, err -} - -// Delete deletes a job by ID -func (a JobsAPI) Delete(jobID int64) error { - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - }{ - jobID, - } - _, err := 
a.Client.performQuery(http.MethodPost, "/jobs/delete", data, nil) - return err -} - -// Get gets a job by ID -func (a JobsAPI) Get(jobID int64) (models.Job, error) { - var job models.Job - - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - }{ - jobID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", data, nil) - if err != nil { - return job, err - } - - err = json.Unmarshal(resp, &job) - return job, err -} - -// Reset overwrites job settings -func (a JobsAPI) Reset(jobID int64, jobSettings models.JobSettings) error { - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - NewSettings models.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"` - }{ - jobID, - jobSettings, - } - _, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", data, nil) - return err -} - -// RunNow runs a job now and return the run_id of the triggered run -func (a JobsAPI) RunNow(jobID int64, runParameters models.RunParameters) (models.Run, error) { - var run models.Run - - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - models.RunParameters - }{ - jobID, - runParameters, - } - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", data, nil) - if err != nil { - return run, err - } - - err = json.Unmarshal(resp, &run) - return run, err -} - -// RunsSubmit submit a one-time run -func (a JobsAPI) RunsSubmit(runName string, clusterSpec models.ClusterSpec, jobTask models.JobTask, timeoutSeconds int32) (models.Run, error) { - var run models.Run - - data := struct { - RunName string `json:"run_name,omitempty" url:"run_name,omitempty"` - models.ClusterSpec - models.JobTask - TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"` - }{ - runName, - clusterSpec, - jobTask, - timeoutSeconds, - } - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", data, nil) - if err != nil { - return run, err - } - - err = json.Unmarshal(resp, &run) - return run, err -} - -// JobsRunsListResponse is a bit special because it has a HasMore field -type JobsRunsListResponse struct { - Runs []models.Run `json:"runs,omitempty" url:"runs,omitempty"` - HasMore bool `json:"has_more,omitempty" url:"has_more,omitempty"` -} - -// RunsList lists runs from most recently started to least -func (a JobsAPI) RunsList(activeOnly, completedOnly bool, jobID int64, offset, limit int32) (JobsRunsListResponse, error) { - var runlistResponse JobsRunsListResponse - - data := struct { - ActiveOnly bool `json:"active_only,omitempty" url:"active_only,omitempty"` - CompletedOnly bool `json:"completed_only,omitempty" url:"completed_only,omitempty"` - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` - Limit int32 `json:"limit,omitempty" url:"limit,omitempty"` - }{ - activeOnly, - completedOnly, - jobID, - offset, - limit, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", data, nil) - if err != nil { - return runlistResponse, err - } - - err = json.Unmarshal(resp, &runlistResponse) - return runlistResponse, err -} - -// RunsGet retrieve the metadata of a run -func (a JobsAPI) RunsGet(runID int64) (models.Run, error) { - var run models.Run - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", data, nil) - if err != nil { - return run, err - } - - err 
= json.Unmarshal(resp, &run) - return run, err -} - -// RunsExport exports and retrieve the job run task -func (a JobsAPI) RunsExport(runID int64) ([]models.ViewItem, error) { - var viewItemsView = struct { - Views []models.ViewItem `json:"views,omitempty" url:"views,omitempty"` - }{} - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", data, nil) - if err != nil { - return viewItemsView.Views, err - } - - err = json.Unmarshal(resp, &viewItemsView) - return viewItemsView.Views, err -} - -// RunsCancel cancels a run -func (a JobsAPI) RunsCancel(runID int64) error { - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - _, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", data, nil) - return err -} - -// JobsRunsGetOutputResponse is the output of the run -type JobsRunsGetOutputResponse struct { - NotebookOutput models.NotebookOutput `json:"notebook_output,omitempty" url:"notebook_output,omitempty"` - Error string `json:"error,omitempty" url:"error,omitempty"` - Metadata models.Run `json:"metadata,omitempty" url:"metadata,omitempty"` -} - -// RunsGetOutput retrieves the output of a run -func (a JobsAPI) RunsGetOutput(runID int64) (JobsRunsGetOutputResponse, error) { - var runsGetOutputResponse JobsRunsGetOutputResponse - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", data, nil) - if err != nil { - return runsGetOutputResponse, err - } - - err = json.Unmarshal(resp, &runsGetOutputResponse) - return runsGetOutputResponse, err -} - -// RunsDelete deletes a non-active run. Returns an error if the run is active. 
-func (a JobsAPI) RunsDelete(runID int64) error {
- data := struct {
- RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
- }{
- runID,
- }
- _, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/delete", data, nil)
- return err
-}
+package aws
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/xinsnake/databricks-sdk-golang/aws/models"
+)
+
+// JobsAPI exposes Jobs API endpoints
+type JobsAPI struct {
+ Client DBClient
+}
+
+func (a JobsAPI) init(client DBClient) JobsAPI {
+ a.Client = client
+ return a
+}
+
+// Create creates a new job
+func (a JobsAPI) Create(jobSettings models.JobSettings) (models.Job, error) {
+ var job models.Job
+
+ resp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", jobSettings, nil)
+ if err != nil {
+ return job, err
+ }
+
+ err = json.Unmarshal(resp, &job)
+ return job, err
+}
+
+// List lists all jobs
+func (a JobsAPI) List() ([]models.Job, error) {
+ var jobsList = struct {
+ Jobs []models.Job `json:"jobs,omitempty" url:"jobs,omitempty"`
+ }{}
+
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil)
+ if err != nil {
+ return jobsList.Jobs, err
+ }
+
+ err = json.Unmarshal(resp, &jobsList)
+ return jobsList.Jobs, err
+}
+
+// Delete deletes a job by ID
+func (a JobsAPI) Delete(jobID int64) error {
+ data := struct {
+ JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+ }{
+ jobID,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/jobs/delete", data, nil)
+ return err
+}
+
+// Get gets a job by ID
+func (a JobsAPI) Get(jobID int64) (models.Job, error) {
+ var job models.Job
+
+ data := struct {
+ JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+ }{
+ jobID,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", data, nil)
+ if err != nil {
+ return job, err
+ }
+
+ err = json.Unmarshal(resp, &job)
+ return job, err
+}
+
+// Reset overwrites job settings
+func (a JobsAPI) Reset(jobID int64, jobSettings models.JobSettings) error {
+ data := struct {
+ JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+ NewSettings models.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
+ }{
+ jobID,
+ jobSettings,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", data, nil)
+ return err
+}
+
+// RunNow runs a job now and returns the run_id of the triggered run
+func (a JobsAPI) RunNow(jobID int64, runParameters models.RunParameters) (models.Run, error) {
+ var run models.Run
+
+ data := struct {
+ JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+ models.RunParameters
+ }{
+ jobID,
+ runParameters,
+ }
+ resp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", data, nil)
+ if err != nil {
+ return run, err
+ }
+
+ err = json.Unmarshal(resp, &run)
+ return run, err
+}
+
+// RunsSubmit submits a one-time run
+func (a JobsAPI) RunsSubmit(runName string, clusterSpec models.ClusterSpec, jobTask models.JobTask, timeoutSeconds int32) (models.Run, error) {
+ var run models.Run
+
+ data := struct {
+ RunName string `json:"run_name,omitempty" url:"run_name,omitempty"`
+ models.ClusterSpec
+ models.JobTask
+ TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
+ }{
+ runName,
+ clusterSpec,
+ jobTask,
+ timeoutSeconds,
+ }
+ resp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", data, nil)
+ if err != nil {
+ return run, err
+ }
+
+ err = json.Unmarshal(resp, &run)
+ return run, err
+}
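The run endpoints above combine into the usual trigger-then-poll pattern. A minimal sketch under stated assumptions: it presumes models.Run exposes the triggered run's run_id as a RunID field, which this diff does not show:

```go
package aws // continues the illustrative sketch

import (
	"time"

	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)

// runAndRefresh is a hypothetical helper: it triggers a job, then re-reads
// the run's metadata a few times. Real code would inspect the run state and
// stop once the run reaches a terminal state rather than loop a fixed
// number of times.
func runAndRefresh(jobs JobsAPI, jobID int64) (models.Run, error) {
	run, err := jobs.RunNow(jobID, models.RunParameters{})
	if err != nil {
		return run, err
	}
	for i := 0; i < 3; i++ {
		time.Sleep(10 * time.Second)
		if run, err = jobs.RunsGet(run.RunID); err != nil { // RunID is assumed
			return run, err
		}
	}
	return run, nil
}
```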
+
+// JobsRunsListResponse is a bit special because it has a HasMore field
+type JobsRunsListResponse struct {
+ Runs []models.Run `json:"runs,omitempty" url:"runs,omitempty"`
+ HasMore bool `json:"has_more,omitempty" url:"has_more,omitempty"`
+}
+
+// RunsList lists runs from most recently started to least
+func (a JobsAPI) RunsList(activeOnly, completedOnly bool, jobID int64, offset, limit int32) (JobsRunsListResponse, error) {
+ var runlistResponse JobsRunsListResponse
+
+ data := struct {
+ ActiveOnly bool `json:"active_only,omitempty" url:"active_only,omitempty"`
+ CompletedOnly bool `json:"completed_only,omitempty" url:"completed_only,omitempty"`
+ JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+ Offset int32 `json:"offset,omitempty" url:"offset,omitempty"`
+ Limit int32 `json:"limit,omitempty" url:"limit,omitempty"`
+ }{
+ activeOnly,
+ completedOnly,
+ jobID,
+ offset,
+ limit,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", data, nil)
+ if err != nil {
+ return runlistResponse, err
+ }
+
+ err = json.Unmarshal(resp, &runlistResponse)
+ return runlistResponse, err
+}
+
+// RunsGet retrieves the metadata of a run
+func (a JobsAPI) RunsGet(runID int64) (models.Run, error) {
+ var run models.Run
+
+ data := struct {
+ RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+ }{
+ runID,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", data, nil)
+ if err != nil {
+ return run, err
+ }
+
+ err = json.Unmarshal(resp, &run)
+ return run, err
+}
+
+// RunsExport exports and retrieves the job run task
+func (a JobsAPI) RunsExport(runID int64) ([]models.ViewItem, error) {
+ var viewItemsView = struct {
+ Views []models.ViewItem `json:"views,omitempty" url:"views,omitempty"`
+ }{}
+
+ data := struct {
+ RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+ }{
+ runID,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", data, nil)
+ if err != nil {
+ return viewItemsView.Views, err
+ }
+
+ err = json.Unmarshal(resp, &viewItemsView)
+ return viewItemsView.Views, err
+}
+
+// RunsCancel cancels a run
+func (a JobsAPI) RunsCancel(runID int64) error {
+ data := struct {
+ RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+ }{
+ runID,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", data, nil)
+ return err
+}
+
+// JobsRunsGetOutputResponse is the output of the run
+type JobsRunsGetOutputResponse struct {
+ NotebookOutput models.NotebookOutput `json:"notebook_output,omitempty" url:"notebook_output,omitempty"`
+ Error string `json:"error,omitempty" url:"error,omitempty"`
+ Metadata models.Run `json:"metadata,omitempty" url:"metadata,omitempty"`
+}
+
+// RunsGetOutput retrieves the output of a run
+func (a JobsAPI) RunsGetOutput(runID int64) (JobsRunsGetOutputResponse, error) {
+ var runsGetOutputResponse JobsRunsGetOutputResponse
+
+ data := struct {
+ RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+ }{
+ runID,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", data, nil)
+ if err != nil {
+ return runsGetOutputResponse, err
+ }
+
+ err = json.Unmarshal(resp, &runsGetOutputResponse)
+ return runsGetOutputResponse, err
+}
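JobsRunsListResponse reports HasMore rather than a total count, so a caller pages by advancing the offset until HasMore turns false. A sketch continuing the illustrative helpers above:

```go
// listAllRuns (same illustrative file as the sketch above) drains the
// paginated runs list by advancing the offset until HasMore turns false.
func listAllRuns(jobs JobsAPI, jobID int64) ([]models.Run, error) {
	var all []models.Run
	const pageSize = int32(25)
	for offset := int32(0); ; offset += pageSize {
		page, err := jobs.RunsList(false, false, jobID, offset, pageSize)
		if err != nil {
			return all, err
		}
		all = append(all, page.Runs...)
		if !page.HasMore {
			return all, nil
		}
	}
}
```

+
+// RunsDelete deletes a non-active run. Returns an error if the run is active.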
+func (a JobsAPI) RunsDelete(runID int64) error { + data := struct { + RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` + }{ + runID, + } + _, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/delete", data, nil) + return err +} diff --git a/aws/libraries.go b/aws/libraries.go index f07c70e..15bf8d2 100644 --- a/aws/libraries.go +++ b/aws/libraries.go @@ -1,83 +1,83 @@ -package aws - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// LibrariesAPI exposes the Libraries API -type LibrariesAPI struct { - Client DBClient -} - -func (a LibrariesAPI) init(client DBClient) LibrariesAPI { - a.Client = client - return a -} - -// AllClusterStatuses gets the status of all libraries on all clusters -func (a LibrariesAPI) AllClusterStatuses() ([]models.ClusterLibraryStatuses, error) { - var allClusterStatusesResponse struct { - Statuses []models.ClusterLibraryStatuses `json:"statuses,omitempty" url:"statuses,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/libraries/all-cluster-statuses", nil, nil) - if err != nil { - return allClusterStatusesResponse.Statuses, err - } - - err = json.Unmarshal(resp, &allClusterStatusesResponse) - return allClusterStatusesResponse.Statuses, err -} - -// LibrariesClusterStatusResponse is a response from AllClusterStatuses -type LibrariesClusterStatusResponse struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - LibraryStatuses []models.LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"` -} - -// ClusterStatus get the status of libraries on a cluster -func (a LibrariesAPI) ClusterStatus(clusterID string) (LibrariesClusterStatusResponse, error) { - var clusterStatusResponse LibrariesClusterStatusResponse - - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/libraries/cluster-status", data, nil) - if err != nil { - return clusterStatusResponse, err - } - - err = json.Unmarshal(resp, &clusterStatusResponse) - return clusterStatusResponse, err -} - -// Install installs libraries on a cluster -func (a LibrariesAPI) Install(clusterID string, libraries []models.Library) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"` - }{ - clusterID, - libraries, - } - _, err := a.Client.performQuery(http.MethodPost, "/libraries/install", data, nil) - return err -} - -// Uninstall sets libraries to be uninstalled on a cluster -func (a LibrariesAPI) Uninstall(clusterID string, libraries []models.Library) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"` - }{ - clusterID, - libraries, - } - _, err := a.Client.performQuery(http.MethodPost, "/libraries/uninstall", data, nil) - return err -} +package aws + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// LibrariesAPI exposes the Libraries API +type LibrariesAPI struct { + Client DBClient +} + +func (a LibrariesAPI) init(client DBClient) LibrariesAPI { + a.Client = client + return a +} + +// AllClusterStatuses gets the status of all libraries on all clusters +func (a LibrariesAPI) AllClusterStatuses() ([]models.ClusterLibraryStatuses, 
error) {
+ var allClusterStatusesResponse struct {
+ Statuses []models.ClusterLibraryStatuses `json:"statuses,omitempty" url:"statuses,omitempty"`
+ }
+
+ resp, err := a.Client.performQuery(http.MethodGet, "/libraries/all-cluster-statuses", nil, nil)
+ if err != nil {
+ return allClusterStatusesResponse.Statuses, err
+ }
+
+ err = json.Unmarshal(resp, &allClusterStatusesResponse)
+ return allClusterStatusesResponse.Statuses, err
+}
+
+// LibrariesClusterStatusResponse is the response from ClusterStatus
+type LibrariesClusterStatusResponse struct {
+ ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+ LibraryStatuses []models.LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"`
+}
+
+// ClusterStatus gets the status of libraries on a cluster
+func (a LibrariesAPI) ClusterStatus(clusterID string) (LibrariesClusterStatusResponse, error) {
+ var clusterStatusResponse LibrariesClusterStatusResponse
+
+ data := struct {
+ ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+ }{
+ clusterID,
+ }
+ resp, err := a.Client.performQuery(http.MethodGet, "/libraries/cluster-status", data, nil)
+ if err != nil {
+ return clusterStatusResponse, err
+ }
+
+ err = json.Unmarshal(resp, &clusterStatusResponse)
+ return clusterStatusResponse, err
+}
+
+// Install installs libraries on a cluster
+func (a LibrariesAPI) Install(clusterID string, libraries []models.Library) error {
+ data := struct {
+ ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+ Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
+ }{
+ clusterID,
+ libraries,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/libraries/install", data, nil)
+ return err
+}
+
+// Uninstall sets libraries to be uninstalled on a cluster
+func (a LibrariesAPI) Uninstall(clusterID string, libraries []models.Library) error {
+ data := struct {
+ ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+ Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
+ }{
+ clusterID,
+ libraries,
+ }
+ _, err := a.Client.performQuery(http.MethodPost, "/libraries/uninstall", data, nil)
+ return err
+}
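Install only requests installation; the libraries are attached asynchronously, so callers typically follow up with ClusterStatus. A minimal sketch in the same illustrative vein (a real caller would poll until the statuses settle):

```go
// installAndCheck (same illustrative file as the earlier sketches) requests
// installation and reads the per-library statuses back once; a real caller
// would poll ClusterStatus until every status settles.
func installAndCheck(libs LibrariesAPI, clusterID string, toInstall []models.Library) (LibrariesClusterStatusResponse, error) {
	if err := libs.Install(clusterID, toInstall); err != nil {
		return LibrariesClusterStatusResponse{}, err
	}
	return libs.ClusterStatus(clusterID)
}
```

diff --git a/aws/models/AclItem.go b/aws/models/AclItem.go
index ff5c34a..2c015f9 100644
--- a/aws/models/AclItem.go
+++ b/aws/models/AclItem.go
@@ -1,6 +1,6 @@
-package models
-
-type AclItem struct {
- Principal string `json:"principal,omitempty" url:"principal,omitempty"`
- Permission *AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
-}
+package models
+
+type AclItem struct {
+ Principal string `json:"principal,omitempty" url:"principal,omitempty"`
+ Permission *AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
+}
diff --git a/aws/models/AclPermission.go b/aws/models/AclPermission.go
index b20f185..2f77ef1 100644
--- a/aws/models/AclPermission.go
+++ b/aws/models/AclPermission.go
@@ -1,9 +1,9 @@
-package models
-
-type AclPermission string
-
-const (
- AclPermissionRead = "READ"
- AclPermissionWrite = "WRITE"
- AclPermissionManage = "MANAGE"
-)
+package models
+
+type AclPermission string
+
+const (
+ AclPermissionRead = "READ"
+ AclPermissionWrite = "WRITE"
+ AclPermissionManage = "MANAGE"
+)
diff --git a/aws/models/AutoScale.go b/aws/models/AutoScale.go
index b5f2648..bbf1f69 100644
--- a/aws/models/AutoScale.go
+++ b/aws/models/AutoScale.go
@@ -1,6 +1,6 @@
-package models
-
-type AutoScale struct {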
`json:"min_workers,omitempty" url:"min_workers,omitempty"` - MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"` -} +package models + +type AutoScale struct { + MinWorkers int32 `json:"min_workers,omitempty" url:"min_workers,omitempty"` + MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"` +} diff --git a/aws/models/AwsAttributes.go b/aws/models/AwsAttributes.go index 7716b2c..5c9fa8e 100644 --- a/aws/models/AwsAttributes.go +++ b/aws/models/AwsAttributes.go @@ -1,12 +1,12 @@ -package models - -type AwsAttributes struct { - FirstOnDemand int32 `json:"first_on_demand,omitempty" url:"first_on_demand,omitempty"` - Availability *AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"` - ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"` - InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` - SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"` - EbsVolumeType *EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"` - EbsVolumeCount int32 `json:"ebs_volume_count,omitempty" url:"ebs_volume_count,omitempty"` - EbsVolumeSize int32 `json:"ebs_volume_size,omitempty" url:"ebs_volume_size,omitempty"` -} +package models + +type AwsAttributes struct { + FirstOnDemand int32 `json:"first_on_demand,omitempty" url:"first_on_demand,omitempty"` + Availability *AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"` + ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"` + InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` + SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"` + EbsVolumeType *EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"` + EbsVolumeCount int32 `json:"ebs_volume_count,omitempty" url:"ebs_volume_count,omitempty"` + EbsVolumeSize int32 `json:"ebs_volume_size,omitempty" url:"ebs_volume_size,omitempty"` +} diff --git a/aws/models/AwsAvailability.go b/aws/models/AwsAvailability.go index 7ef4f55..a6fc38a 100644 --- a/aws/models/AwsAvailability.go +++ b/aws/models/AwsAvailability.go @@ -1,9 +1,9 @@ -package models - -type AwsAvailability string - -const ( - AwsAvailabilitySpot = "SPOT" - AwsAvailabilityOnDemand = "ON_DEMAND" - AwsAvailabilitySpotWithFallback = "SPOT_WITH_FALLBACK" -) +package models + +type AwsAvailability string + +const ( + AwsAvailabilitySpot = "SPOT" + AwsAvailabilityOnDemand = "ON_DEMAND" + AwsAvailabilitySpotWithFallback = "SPOT_WITH_FALLBACK" +) diff --git a/aws/models/ClusterAttributes.go b/aws/models/ClusterAttributes.go index b6f8492..87b59ec 100644 --- a/aws/models/ClusterAttributes.go +++ b/aws/models/ClusterAttributes.go @@ -1,18 +1,18 @@ -package models - -type ClusterAttributes struct { - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - SSHPublicKeys []string `json:"ssh_public_keys,omitempty" 
url:"ssh_public_keys,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - ClusterSource *AwsAvailability `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` -} +package models + +type ClusterAttributes struct { + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` + CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` + ClusterSource *AwsAvailability `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` +} diff --git a/aws/models/ClusterCloudProviderNodeInfo.go b/aws/models/ClusterCloudProviderNodeInfo.go index 1508bb8..257a440 100644 --- a/aws/models/ClusterCloudProviderNodeInfo.go +++ b/aws/models/ClusterCloudProviderNodeInfo.go @@ -1,7 +1,7 @@ -package models - -type ClusterCloudProviderNodeInfo struct { - Status *ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"` - AvailableCoreQuota int32 `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"` - TotalCoreQuota int32 `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"` -} +package models + +type ClusterCloudProviderNodeInfo struct { + Status *ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"` + AvailableCoreQuota int32 `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"` + TotalCoreQuota int32 `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"` +} diff --git a/aws/models/ClusterCloudProviderNodeStatus.go b/aws/models/ClusterCloudProviderNodeStatus.go index 3d40d57..1d23b56 100644 --- a/aws/models/ClusterCloudProviderNodeStatus.go +++ b/aws/models/ClusterCloudProviderNodeStatus.go @@ -1,8 +1,8 @@ -package models - -type ClusterCloudProviderNodeStatus string - -const ( - ClusterCloudProviderNodeStatusNotEnabledOnSubscription = "NotEnabledOnSubscription" - ClusterCloudProviderNodeStatusNotAvailableInRegion = "NotAvailableInRegion" -) +package models + +type 
ClusterCloudProviderNodeStatus string + +const ( + ClusterCloudProviderNodeStatusNotEnabledOnSubscription = "NotEnabledOnSubscription" + ClusterCloudProviderNodeStatusNotAvailableInRegion = "NotAvailableInRegion" +) diff --git a/aws/models/ClusterEvent.go b/aws/models/ClusterEvent.go index 26361ed..4c57ca7 100644 --- a/aws/models/ClusterEvent.go +++ b/aws/models/ClusterEvent.go @@ -1,8 +1,8 @@ -package models - -type ClusterEvent struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"` - Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"` - Details *AwsAttributes `json:"details,omitempty" url:"details,omitempty"` -} +package models + +type ClusterEvent struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"` + Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"` + Details *AwsAttributes `json:"details,omitempty" url:"details,omitempty"` +} diff --git a/aws/models/ClusterEventType.go b/aws/models/ClusterEventType.go index d9d550a..bed64b4 100644 --- a/aws/models/ClusterEventType.go +++ b/aws/models/ClusterEventType.go @@ -1,27 +1,27 @@ -package models - -type ClusterEventType string - -const ( - ClusterEventTypeCreating = "CREATING" - ClusterEventTypeDidNotExpandDisk = "DID_NOT_EXPAND_DISK" - ClusterEventTypeExpandedDisk = "EXPANDED_DISK" - ClusterEventTypeFailedToExpandDisk = "FAILED_TO_EXPAND_DISK" - ClusterEventTypeInitScriptStarting = "INIT_SCRIPTS_STARTING" - ClusterEventTypeInitScriptFinished = "INIT_SCRIPTS_FINISHED" - ClusterEventTypeStarting = "STARTING" - ClusterEventTypeRestarting = "RESTARTING" - ClusterEventTypeTerminating = "TERMINATING" - ClusterEventTypeEdited = "EDITED" - ClusterEventTypeRunning = "RUNNING" - ClusterEventTypeResizing = "RESIZING" - ClusterEventTypeUpsizeCompleted = "UPSIZE_COMPLETED" - ClusterEventTypeNodesLost = "NODES_LOST" - ClusterEventTypeDriverHealthy = "DRIVER_HEALTHY" - ClusterEventTypeDriverUnavailable = "DRIVER_UNAVAILABLE" - ClusterEventTypeSparkException = "SPARK_EXCEPTION" - ClusterEventTypeDriverNotResponding = "DRIVER_NOT_RESPONDING" - ClusterEventTypeDbfsDown = "DBFS_DOWN" - ClusterEventTypeMetastoreDown = "METASTORE_DOWN" - ClusterEventTypeAutoscalingStatsReport = "AUTOSCALING_STATS_REPORT" -) +package models + +type ClusterEventType string + +const ( + ClusterEventTypeCreating = "CREATING" + ClusterEventTypeDidNotExpandDisk = "DID_NOT_EXPAND_DISK" + ClusterEventTypeExpandedDisk = "EXPANDED_DISK" + ClusterEventTypeFailedToExpandDisk = "FAILED_TO_EXPAND_DISK" + ClusterEventTypeInitScriptStarting = "INIT_SCRIPTS_STARTING" + ClusterEventTypeInitScriptFinished = "INIT_SCRIPTS_FINISHED" + ClusterEventTypeStarting = "STARTING" + ClusterEventTypeRestarting = "RESTARTING" + ClusterEventTypeTerminating = "TERMINATING" + ClusterEventTypeEdited = "EDITED" + ClusterEventTypeRunning = "RUNNING" + ClusterEventTypeResizing = "RESIZING" + ClusterEventTypeUpsizeCompleted = "UPSIZE_COMPLETED" + ClusterEventTypeNodesLost = "NODES_LOST" + ClusterEventTypeDriverHealthy = "DRIVER_HEALTHY" + ClusterEventTypeDriverUnavailable = "DRIVER_UNAVAILABLE" + ClusterEventTypeSparkException = "SPARK_EXCEPTION" + ClusterEventTypeDriverNotResponding = "DRIVER_NOT_RESPONDING" + ClusterEventTypeDbfsDown = "DBFS_DOWN" + ClusterEventTypeMetastoreDown = "METASTORE_DOWN" + ClusterEventTypeAutoscalingStatsReport = "AUTOSCALING_STATS_REPORT" +) diff --git 
a/aws/models/ClusterInfo.go b/aws/models/ClusterInfo.go index e36ab94..6026899 100644 --- a/aws/models/ClusterInfo.go +++ b/aws/models/ClusterInfo.go @@ -1,37 +1,37 @@ -package models - -type ClusterInfo struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - Driver *SparkNode `json:"driver,omitempty" url:"driver,omitempty"` - Executors []SparkNode `json:"executors,omitempty" url:"executors,omitempty"` - SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` - JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - ClusterSource *AwsAvailability `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` - State *ClusterState `json:"state,omitempty" url:"state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` - LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` - LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` - ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` - ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` - DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` - ClusterLogStatus *LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` - TerminationReason *S3StorageInfo `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` -} +package models + +type ClusterInfo struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + Driver 
*SparkNode `json:"driver,omitempty" url:"driver,omitempty"` + Executors []SparkNode `json:"executors,omitempty" url:"executors,omitempty"` + SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` + JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` + CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` + ClusterSource *AwsAvailability `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` + State *ClusterState `json:"state,omitempty" url:"state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` + LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` + LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` + ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` + ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` + DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` + ClusterLogStatus *LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` + TerminationReason *S3StorageInfo `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` +} diff --git a/aws/models/ClusterInstance.go b/aws/models/ClusterInstance.go index 7cea51a..c7cd031 100644 --- a/aws/models/ClusterInstance.go +++ b/aws/models/ClusterInstance.go @@ -1,6 +1,6 @@ -package models - -type ClusterInstance struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - SparkContextID string `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` -} +package models + +type ClusterInstance struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + SparkContextID string `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` +} diff --git a/aws/models/ClusterLibraryStatuses.go b/aws/models/ClusterLibraryStatuses.go index 69456d0..6392e00 100644 --- a/aws/models/ClusterLibraryStatuses.go +++ b/aws/models/ClusterLibraryStatuses.go @@ -1,6 +1,6 @@ -package models - -type ClusterLibraryStatuses struct { - ClusterID string 
`json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - LibraryStatuses []LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"` -} +package models + +type ClusterLibraryStatuses struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + LibraryStatuses []LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"` +} diff --git a/aws/models/ClusterLogConf.go b/aws/models/ClusterLogConf.go index 9c7dcf4..24fbcce 100644 --- a/aws/models/ClusterLogConf.go +++ b/aws/models/ClusterLogConf.go @@ -1,6 +1,6 @@ -package models - -type ClusterLogConf struct { - Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` - S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"` -} +package models + +type ClusterLogConf struct { + Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` + S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"` +} diff --git a/aws/models/ClusterSize.go b/aws/models/ClusterSize.go index 74ac9bb..502cf5d 100644 --- a/aws/models/ClusterSize.go +++ b/aws/models/ClusterSize.go @@ -1,6 +1,6 @@ -package models - -type ClusterSize struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` -} +package models + +type ClusterSize struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` +} diff --git a/aws/models/ClusterSource.go b/aws/models/ClusterSource.go index 71d1680..7131a35 100644 --- a/aws/models/ClusterSource.go +++ b/aws/models/ClusterSource.go @@ -1,9 +1,9 @@ -package models - -type ClusterSource string - -const ( - ClusterSourceUI = "UI" - ClusterSourceJob = "JOB" - ClusterSourceAPI = "API" -) +package models + +type ClusterSource string + +const ( + ClusterSourceUI = "UI" + ClusterSourceJob = "JOB" + ClusterSourceAPI = "API" +) diff --git a/aws/models/ClusterSpec.go b/aws/models/ClusterSpec.go index 395f56c..abaac4c 100644 --- a/aws/models/ClusterSpec.go +++ b/aws/models/ClusterSpec.go @@ -1,7 +1,7 @@ -package models - -type ClusterSpec struct { - ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"` - NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"` - Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"` -} +package models + +type ClusterSpec struct { + ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"` + NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"` + Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"` +} diff --git a/aws/models/ClusterState.go b/aws/models/ClusterState.go index 4b4373f..398626c 100644 --- a/aws/models/ClusterState.go +++ b/aws/models/ClusterState.go @@ -1,13 +1,13 @@ -package models - -type ClusterState string - -const ( - ClusterStatePending = "PENDING" - ClusterStateRunning = "RUNNING" - ClusterStateRestarting = "RESTARTING" - ClusterStateResizing = "RESIZING" - ClusterStateTerminating = "TERMINATING" - ClusterStateError = "ERROR" - ClusterStateUnknown = "UNKNOWN" -) +package models + +type ClusterState string + +const ( + ClusterStatePending = "PENDING" + ClusterStateRunning = "RUNNING" + ClusterStateRestarting = "RESTARTING" + ClusterStateResizing = "RESIZING" + ClusterStateTerminating = "TERMINATING" + 
ClusterStateError = "ERROR" + ClusterStateUnknown = "UNKNOWN" +) diff --git a/aws/models/ClusterTag.go b/aws/models/ClusterTag.go index c37c004..2a2d227 100644 --- a/aws/models/ClusterTag.go +++ b/aws/models/ClusterTag.go @@ -1,6 +1,6 @@ -package models - -type ClusterTag struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ClusterTag struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/aws/models/CronSchedule.go b/aws/models/CronSchedule.go index 9617d25..c6795ab 100644 --- a/aws/models/CronSchedule.go +++ b/aws/models/CronSchedule.go @@ -1,6 +1,6 @@ -package models - -type CronSchedule struct { - QuartzCronExpression string `json:"quartz_cron_expression,omitempty" url:"quartz_cron_expression,omitempty"` - TimezoneID string `json:"timezone_id,omitempty" url:"timezone_id,omitempty"` -} +package models + +type CronSchedule struct { + QuartzCronExpression string `json:"quartz_cron_expression,omitempty" url:"quartz_cron_expression,omitempty"` + TimezoneID string `json:"timezone_id,omitempty" url:"timezone_id,omitempty"` +} diff --git a/aws/models/DbfsStorageInfo.go b/aws/models/DbfsStorageInfo.go index 8d8525f..7875924 100644 --- a/aws/models/DbfsStorageInfo.go +++ b/aws/models/DbfsStorageInfo.go @@ -1,5 +1,5 @@ -package models - -type DbfsStorageInfo struct { - Destination string `json:"destination,omitempty" url:"destination,omitempty"` -} +package models + +type DbfsStorageInfo struct { + Destination string `json:"destination,omitempty" url:"destination,omitempty"` +} diff --git a/aws/models/DiskSpec.go b/aws/models/DiskSpec.go index aba9fb2..51a9752 100644 --- a/aws/models/DiskSpec.go +++ b/aws/models/DiskSpec.go @@ -1,7 +1,7 @@ -package models - -type DiskSpec struct { - DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"` - DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"` - DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"` -} +package models + +type DiskSpec struct { + DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"` + DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"` + DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"` +} diff --git a/aws/models/DiskType.go b/aws/models/DiskType.go index cf87a7f..4c410eb 100644 --- a/aws/models/DiskType.go +++ b/aws/models/DiskType.go @@ -1,5 +1,5 @@ -package models - -type DiskType struct { - EbsVolumeType EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"` -} +package models + +type DiskType struct { + EbsVolumeType EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"` +} diff --git a/aws/models/EbsVolumeType.go b/aws/models/EbsVolumeType.go index 928056f..c6257d6 100644 --- a/aws/models/EbsVolumeType.go +++ b/aws/models/EbsVolumeType.go @@ -1,8 +1,8 @@ -package models - -type EbsVolumeType string - -const ( - EbsVolumeTypeGeneralPurposeSsd = "GENERAL_PURPOSE_SSD" - EbsVolumeTypeThroughputOptimizedHdd = "THROUGHPUT_OPTIMIZED_HDD" -) +package models + +type EbsVolumeType string + +const ( + EbsVolumeTypeGeneralPurposeSsd = "GENERAL_PURPOSE_SSD" + EbsVolumeTypeThroughputOptimizedHdd = "THROUGHPUT_OPTIMIZED_HDD" +) diff --git a/aws/models/EventDetails.go b/aws/models/EventDetails.go index 84596a5..91886bc 100644 --- a/aws/models/EventDetails.go +++ b/aws/models/EventDetails.go @@ 
-1,10 +1,10 @@ -package models - -type EventDetails struct { - CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"` - TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"` - PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"` - Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"` - PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"` - ClusterSize *ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"` -} +package models + +type EventDetails struct { + CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"` + TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"` + PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"` + Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"` + PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"` + ClusterSize *ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"` +} diff --git a/aws/models/ExportFormat.go b/aws/models/ExportFormat.go index af811c0..493decf 100644 --- a/aws/models/ExportFormat.go +++ b/aws/models/ExportFormat.go @@ -1,10 +1,10 @@ -package models - -type ExportFormat string - -const ( - ExportFormatSource = "SOURCE" - ExportFormatHtml = "HTML" - ExportFormatJupyter = "JUPYTER" - ExportFormatDbc = "DBC" -) +package models + +type ExportFormat string + +const ( + ExportFormatSource = "SOURCE" + ExportFormatHtml = "HTML" + ExportFormatJupyter = "JUPYTER" + ExportFormatDbc = "DBC" +) diff --git a/aws/models/FileInfo.go b/aws/models/FileInfo.go index 26d33fa..2875616 100644 --- a/aws/models/FileInfo.go +++ b/aws/models/FileInfo.go @@ -1,7 +1,7 @@ -package models - -type FileInfo struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - IsDir bool `json:"is_dir,omitempty" url:"is_dir,omitempty"` - FileSize int64 `json:"file_size,omitempty" url:"file_size,omitempty"` -} +package models + +type FileInfo struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + IsDir bool `json:"is_dir,omitempty" url:"is_dir,omitempty"` + FileSize int64 `json:"file_size,omitempty" url:"file_size,omitempty"` +} diff --git a/aws/models/InitScriptInfo.go b/aws/models/InitScriptInfo.go index e9a6ba4..afdf7cf 100644 --- a/aws/models/InitScriptInfo.go +++ b/aws/models/InitScriptInfo.go @@ -1,6 +1,6 @@ -package models - -type InitScriptInfo struct { - Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` - S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"` -} +package models + +type InitScriptInfo struct { + Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` + S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"` +} diff --git a/aws/models/InstancePoolAndStats.go b/aws/models/InstancePoolAndStats.go index 5913c4d..f457979 100644 --- a/aws/models/InstancePoolAndStats.go +++ b/aws/models/InstancePoolAndStats.go @@ -1,18 +1,18 @@ -package models - -type InstancePoolAndStats struct { - InstancePoolName string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"` - MinIdleInstances int32 `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"` - MaxCapacity int32 
`json:"max_capacity,omitempty" url:"max_capacity,omitempty"` - AwsAttributes InstancePoolAwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` - NodetypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - IdleInstanceAutoterminationMinutes int32 `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - DiskSpec DiskSpec `json:"disk_spec,omitempty" url:"disk_spec,omitempty"` - PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"` - InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` - DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` - State InstancePoolState `json:"state,omitempty" url:"state,omitempty"` - Stats InstancePoolStats `json:"stats,omitempty" url:"stats,omitempty"` -} +package models + +type InstancePoolAndStats struct { + InstancePoolName string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"` + MinIdleInstances int32 `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"` + MaxCapacity int32 `json:"max_capacity,omitempty" url:"max_capacity,omitempty"` + AwsAttributes InstancePoolAwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` + NodetypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + IdleInstanceAutoterminationMinutes int32 `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` + DiskSpec DiskSpec `json:"disk_spec,omitempty" url:"disk_spec,omitempty"` + PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"` + InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` + DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` + State InstancePoolState `json:"state,omitempty" url:"state,omitempty"` + Stats InstancePoolStats `json:"stats,omitempty" url:"stats,omitempty"` +} diff --git a/aws/models/InstancePoolAwsAttributes.go b/aws/models/InstancePoolAwsAttributes.go index cfbbf29..dfcf1d4 100644 --- a/aws/models/InstancePoolAwsAttributes.go +++ b/aws/models/InstancePoolAwsAttributes.go @@ -1,7 +1,7 @@ -package models - -type InstancePoolAwsAttributes struct { - Availability AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"` - ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"` - SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"` -} +package models + +type InstancePoolAwsAttributes struct { + Availability AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"` + ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"` + SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"` +} diff --git a/aws/models/InstancePoolState.go b/aws/models/InstancePoolState.go index 0c8be17..f00e7cd 100644 --- a/aws/models/InstancePoolState.go +++ 
b/aws/models/InstancePoolState.go @@ -1,8 +1,8 @@ -package models - -type InstancePoolState string - -const ( - InstancePoolStateActive = "ACTIVE" - InstancePoolStateDeleted = "DELETED" -) +package models + +type InstancePoolState string + +const ( + InstancePoolStateActive = "ACTIVE" + InstancePoolStateDeleted = "DELETED" +) diff --git a/aws/models/InstancePoolStats.go b/aws/models/InstancePoolStats.go index 3baf900..a394f24 100644 --- a/aws/models/InstancePoolStats.go +++ b/aws/models/InstancePoolStats.go @@ -1,8 +1,8 @@ -package models - -type InstancePoolStats struct { - UsedCount int32 `json:"used_count,omitempty" url:"used_count,omitempty"` - IdleCount int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"` - PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"` - PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"` -} +package models + +type InstancePoolStats struct { + UsedCount int32 `json:"used_count,omitempty" url:"used_count,omitempty"` + IdleCount int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"` + PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"` + PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"` +} diff --git a/aws/models/InstanceProfile.go b/aws/models/InstanceProfile.go index b0fdeb1..34d02ac 100644 --- a/aws/models/InstanceProfile.go +++ b/aws/models/InstanceProfile.go @@ -1,5 +1,5 @@ -package models - -type InstanceProfile struct { - InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` -} +package models + +type InstanceProfile struct { + InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"` +} diff --git a/aws/models/Job.go b/aws/models/Job.go index 925249d..a3ac135 100644 --- a/aws/models/Job.go +++ b/aws/models/Job.go @@ -1,8 +1,8 @@ -package models - -type Job struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - Settings *JobSettings `json:"settings,omitempty" url:"settings,omitempty"` - CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"` -} +package models + +type Job struct { + JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + Settings *JobSettings `json:"settings,omitempty" url:"settings,omitempty"` + CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"` +} diff --git a/aws/models/JobEmailNotifications.go b/aws/models/JobEmailNotifications.go index 141eccc..5d79719 100644 --- a/aws/models/JobEmailNotifications.go +++ b/aws/models/JobEmailNotifications.go @@ -1,8 +1,8 @@ -package models - -type JobEmailNotifications struct { - OnStart []string `json:"on_start,omitempty" url:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty" url:"on_success,omitempty"` - OnFailure []string `json:"on_failure,omitempty" url:"on_failure,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty" url:"no_alert_for_skipped_runs,omitempty"` -} +package models + +type JobEmailNotifications struct { + OnStart []string `json:"on_start,omitempty" url:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty" url:"on_success,omitempty"` + OnFailure []string 
`json:"on_failure,omitempty" url:"on_failure,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty" url:"no_alert_for_skipped_runs,omitempty"` +} diff --git a/aws/models/JobSettings.go b/aws/models/JobSettings.go index 10936dd..e5dd0cc 100644 --- a/aws/models/JobSettings.go +++ b/aws/models/JobSettings.go @@ -1,19 +1,19 @@ -package models - -type JobSettings struct { - ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"` - NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"` - NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` - SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` - SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` - SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` - Name string `json:"name,omitempty" url:"name,omitempty"` - Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"` - EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"` - TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"` - MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"` - MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"` - Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` - MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"` -} +package models + +type JobSettings struct { + ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"` + NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"` + NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` + SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` + SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` + SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` + Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"` + EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"` + TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"` + MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"` + MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"` + Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` + MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"` +} diff --git a/aws/models/JobTask.go b/aws/models/JobTask.go index f75c890..3a24a03 100644 --- a/aws/models/JobTask.go +++ b/aws/models/JobTask.go @@ -1,8 +1,8 @@ -package models - -type JobTask struct { - NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` - SparkJarTask 
*SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` - SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` - SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` -} +package models + +type JobTask struct { + NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` + SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` + SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` + SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` +} diff --git a/aws/models/Language.go b/aws/models/Language.go index 7f1b9d5..e0a9561 100644 --- a/aws/models/Language.go +++ b/aws/models/Language.go @@ -1,10 +1,10 @@ -package models - -type Language string - -const ( - LanguageScala = "SCALA" - LanguagePython = "PYTHON" - LanguageSQL = "SQL" - LanguageR = "R" -) +package models + +type Language string + +const ( + LanguageScala = "SCALA" + LanguagePython = "PYTHON" + LanguageSQL = "SQL" + LanguageR = "R" +) diff --git a/aws/models/Library.go b/aws/models/Library.go index 0cb0800..34f8a8f 100644 --- a/aws/models/Library.go +++ b/aws/models/Library.go @@ -1,10 +1,10 @@ -package models - -type Library struct { - Jar string `json:"jar,omitempty" url:"jar,omitempty"` - Egg string `json:"egg,omitempty" url:"egg,omitempty"` - Whl string `json:"whl,omitempty" url:"whl,omitempty"` - Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"` - Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"` - Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"` -} +package models + +type Library struct { + Jar string `json:"jar,omitempty" url:"jar,omitempty"` + Egg string `json:"egg,omitempty" url:"egg,omitempty"` + Whl string `json:"whl,omitempty" url:"whl,omitempty"` + Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"` + Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"` + Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"` +} diff --git a/aws/models/LibraryFullStatus.go b/aws/models/LibraryFullStatus.go index 4edca20..5f2c97b 100644 --- a/aws/models/LibraryFullStatus.go +++ b/aws/models/LibraryFullStatus.go @@ -1,8 +1,8 @@ -package models - -type LibraryFullStatus struct { - Library *Library `json:"library,omitempty" url:"library,omitempty"` - Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"` - Messages []string `json:"messages,omitempty" url:"messages,omitempty"` - IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"` -} +package models + +type LibraryFullStatus struct { + Library *Library `json:"library,omitempty" url:"library,omitempty"` + Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"` + Messages []string `json:"messages,omitempty" url:"messages,omitempty"` + IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"` +} diff --git a/aws/models/LibraryInstallStatus.go b/aws/models/LibraryInstallStatus.go index bd84e61..2accf5a 100644 --- a/aws/models/LibraryInstallStatus.go +++ b/aws/models/LibraryInstallStatus.go @@ -1,12 +1,12 @@ -package models - -type LibraryInstallStatus string - -const ( - LibraryInstallStatusPending = "PENDING" - LibraryInstallStatusResolving = 
"RESOLVING" - LibraryInstallStatusInstalling = "INSTALLING" - LibraryInstallStatusInstalled = "INSTALLED" - LibraryInstallStatusFailed = "FAILED" - LibraryInstallStatusUninstallOnRestart = "UNINSTALL_ON_RESTART" -) +package models + +type LibraryInstallStatus string + +const ( + LibraryInstallStatusPending = "PENDING" + LibraryInstallStatusResolving = "RESOLVING" + LibraryInstallStatusInstalling = "INSTALLING" + LibraryInstallStatusInstalled = "INSTALLED" + LibraryInstallStatusFailed = "FAILED" + LibraryInstallStatusUninstallOnRestart = "UNINSTALL_ON_RESTART" +) diff --git a/aws/models/ListOrder.go b/aws/models/ListOrder.go index 831ddd9..3e9c17c 100644 --- a/aws/models/ListOrder.go +++ b/aws/models/ListOrder.go @@ -1,8 +1,8 @@ -package models - -type ListOrder string - -const ( - ListOrderDesc = "DESC" - ListOrderAsc = "ASC" -) +package models + +type ListOrder string + +const ( + ListOrderDesc = "DESC" + ListOrderAsc = "ASC" +) diff --git a/aws/models/LogSyncStatus.go b/aws/models/LogSyncStatus.go index da42624..f2bb45e 100644 --- a/aws/models/LogSyncStatus.go +++ b/aws/models/LogSyncStatus.go @@ -1,6 +1,6 @@ -package models - -type LogSyncStatus struct { - LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"` - LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"` -} +package models + +type LogSyncStatus struct { + LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"` + LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"` +} diff --git a/aws/models/MavenLibrary.go b/aws/models/MavenLibrary.go index 7f5e900..0270a8a 100644 --- a/aws/models/MavenLibrary.go +++ b/aws/models/MavenLibrary.go @@ -1,7 +1,7 @@ -package models - -type MavenLibrary struct { - Coordinates string `json:"coordinates,omitempty" url:"coordinates,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` - Exclusions []string `json:"exclusions,omitempty" url:"exclusions,omitempty"` -} +package models + +type MavenLibrary struct { + Coordinates string `json:"coordinates,omitempty" url:"coordinates,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` + Exclusions []string `json:"exclusions,omitempty" url:"exclusions,omitempty"` +} diff --git a/aws/models/NewCluster.go b/aws/models/NewCluster.go index 8228d2f..6760974 100644 --- a/aws/models/NewCluster.go +++ b/aws/models/NewCluster.go @@ -1,18 +1,18 @@ -package models - -type NewCluster struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" 
url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` -} +package models + +type NewCluster struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` + CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` +} diff --git a/aws/models/NodeType.go b/aws/models/NodeType.go index 7b49896..bf28ece 100644 --- a/aws/models/NodeType.go +++ b/aws/models/NodeType.go @@ -1,11 +1,11 @@ -package models - -type NodeType struct { - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"` - NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"` - Description string `json:"description,omitempty" url:"description,omitempty"` - InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"` - IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"` - NodeInfo *ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"` -} +package models + +type NodeType struct { + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"` + NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"` + Description string `json:"description,omitempty" url:"description,omitempty"` + InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"` + IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"` + NodeInfo *ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"` +} diff --git a/aws/models/NotebookOutput.go b/aws/models/NotebookOutput.go index 78628a6..62c4dab 100644 --- a/aws/models/NotebookOutput.go +++ b/aws/models/NotebookOutput.go @@ -1,6 +1,6 @@ -package models - -type NotebookOutput struct { - Result string `json:"result,omitempty" url:"result,omitempty"` - Truncated bool `json:"truncated,omitempty" url:"truncated,omitempty"` -} +package models + +type NotebookOutput struct { + Result string `json:"result,omitempty" url:"result,omitempty"` + Truncated bool `json:"truncated,omitempty" url:"truncated,omitempty"` +} diff --git a/aws/models/NotebookTask.go 
b/aws/models/NotebookTask.go index 1776143..b839a0b 100644 --- a/aws/models/NotebookTask.go +++ b/aws/models/NotebookTask.go @@ -1,6 +1,6 @@ -package models - -type NotebookTask struct { - NotebookPath string `json:"notebook_path,omitempty" url:"notebook_path,omitempty"` - BaseParameters map[string]string `json:"base_parameters,omitempty" url:"base_parameters,omitempty"` -} +package models + +type NotebookTask struct { + NotebookPath string `json:"notebook_path,omitempty" url:"notebook_path,omitempty"` + BaseParameters map[string]string `json:"base_parameters,omitempty" url:"base_parameters,omitempty"` +} diff --git a/aws/models/ObjectInfo.go b/aws/models/ObjectInfo.go index 5033f11..cee81f2 100644 --- a/aws/models/ObjectInfo.go +++ b/aws/models/ObjectInfo.go @@ -1,7 +1,7 @@ -package models - -type ObjectInfo struct { - ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"` - Path string `json:"path,omitempty" url:"path,omitempty"` - Language *Language `json:"language,omitempty" url:"language,omitempty"` -} +package models + +type ObjectInfo struct { + ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"` + Path string `json:"path,omitempty" url:"path,omitempty"` + Language *Language `json:"language,omitempty" url:"language,omitempty"` +} diff --git a/aws/models/ObjectType.go b/aws/models/ObjectType.go index 4e3240d..03287cf 100644 --- a/aws/models/ObjectType.go +++ b/aws/models/ObjectType.go @@ -1,9 +1,9 @@ -package models - -type ObjectType string - -const ( - ObjectTypeNotebook = "NOTEBOOK" - ObjectTypeDirectory = "DIRECTORY" - ObjectTypeLibrary = "LIBRARY" -) +package models + +type ObjectType string + +const ( + ObjectTypeNotebook = "NOTEBOOK" + ObjectTypeDirectory = "DIRECTORY" + ObjectTypeLibrary = "LIBRARY" +) diff --git a/aws/models/ParamPair.go b/aws/models/ParamPair.go index c023169..029f8ab 100644 --- a/aws/models/ParamPair.go +++ b/aws/models/ParamPair.go @@ -1,6 +1,6 @@ -package models - -type ParamPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ParamPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/aws/models/ParameterPair.go b/aws/models/ParameterPair.go index be47579..6914560 100644 --- a/aws/models/ParameterPair.go +++ b/aws/models/ParameterPair.go @@ -1,6 +1,6 @@ -package models - -type ParameterPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ParameterPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/aws/models/PrincipalName.go b/aws/models/PrincipalName.go index 9b5346e..c1d5023 100644 --- a/aws/models/PrincipalName.go +++ b/aws/models/PrincipalName.go @@ -1,6 +1,6 @@ -package models - -type PrincipalName struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` -} +package models + +type PrincipalName struct { + UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` +} diff --git a/aws/models/PublicTokenInfo.go b/aws/models/PublicTokenInfo.go index 9b62146..3d29372 100644 --- a/aws/models/PublicTokenInfo.go +++ 
b/aws/models/PublicTokenInfo.go @@ -1,8 +1,8 @@ -package models - -type PublicTokenInfo struct { - TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` - CreationTime int64 `json:"creation_time,omitempty" url:"creation_time,omitempty"` - ExpiryTime int64 `json:"expiry_time,omitempty" url:"expiry_time,omitempty"` - Comment string `json:"comment,omitempty" url:"comment,omitempty"` -} +package models + +type PublicTokenInfo struct { + TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` + CreationTime int64 `json:"creation_time,omitempty" url:"creation_time,omitempty"` + ExpiryTime int64 `json:"expiry_time,omitempty" url:"expiry_time,omitempty"` + Comment string `json:"comment,omitempty" url:"comment,omitempty"` +} diff --git a/aws/models/PythonPyPiLibrary.go b/aws/models/PythonPyPiLibrary.go index 0ade9b0..eacc27c 100644 --- a/aws/models/PythonPyPiLibrary.go +++ b/aws/models/PythonPyPiLibrary.go @@ -1,6 +1,6 @@ -package models - -type PythonPyPiLibrary struct { - Package string `json:"package,omitempty" url:"package,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` -} +package models + +type PythonPyPiLibrary struct { + Package string `json:"package,omitempty" url:"package,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` +} diff --git a/aws/models/RCranLibrary.go b/aws/models/RCranLibrary.go index e1ded03..f1c90ca 100644 --- a/aws/models/RCranLibrary.go +++ b/aws/models/RCranLibrary.go @@ -1,6 +1,6 @@ -package models - -type RCranLibrary struct { - Package string `json:"package,omitempty" url:"package,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` -} +package models + +type RCranLibrary struct { + Package string `json:"package,omitempty" url:"package,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` +} diff --git a/aws/models/ResizeCause.go b/aws/models/ResizeCause.go index a83ab2f..0370f8b 100644 --- a/aws/models/ResizeCause.go +++ b/aws/models/ResizeCause.go @@ -1,9 +1,9 @@ -package models - -type ResizeCause string - -const ( - ResizeCauseAutoscale = "AUTOSCALE" - ResizeCauseUserRequest = "USER_REQUEST" - ResizeCauseAutorecovery = "AUTORECOVERY" -) +package models + +type ResizeCause string + +const ( + ResizeCauseAutoscale = "AUTOSCALE" + ResizeCauseUserRequest = "USER_REQUEST" + ResizeCauseAutorecovery = "AUTORECOVERY" +) diff --git a/aws/models/Run.go b/aws/models/Run.go index 8c764a1..2674d55 100644 --- a/aws/models/Run.go +++ b/aws/models/Run.go @@ -1,20 +1,20 @@ -package models - -type Run struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"` - OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"` - State *RunState `json:"state,omitempty" url:"state,omitempty"` - Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` - Task *JobTask `json:"task,omitempty" url:"task,omitempty"` - ClusterSpec *ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"` - ClusterInstance *ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"` - OverridingParameters *RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"` - StartTime int64 `json:"start_time,omitempty" 
url:"start_time,omitempty"` - SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"` - ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"` - CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"` - Trigger *TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"` -} +package models + +type Run struct { + JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` + RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"` + OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"` + State *RunState `json:"state,omitempty" url:"state,omitempty"` + Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` + Task *JobTask `json:"task,omitempty" url:"task,omitempty"` + ClusterSpec *ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"` + ClusterInstance *ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"` + OverridingParameters *RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"` + ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"` + CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"` + Trigger *TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"` +} diff --git a/aws/models/RunLifeCycleState.go b/aws/models/RunLifeCycleState.go index 0208ce3..8877768 100644 --- a/aws/models/RunLifeCycleState.go +++ b/aws/models/RunLifeCycleState.go @@ -1,12 +1,12 @@ -package models - -type RunLifeCycleState string - -const ( - RunLifeCycleStatePending = "PENDING" - RunLifeCycleStateRunning = "RUNNING" - RunLifeCycleStateTerminating = "TERMINATING" - RunLifeCycleStateTerminated = "TERMINATED" - RunLifeCycleStateSkipped = "SKIPPED" - RunLifeCycleStateInternalError = "INTERNAL_ERROR" -) +package models + +type RunLifeCycleState string + +const ( + RunLifeCycleStatePending = "PENDING" + RunLifeCycleStateRunning = "RUNNING" + RunLifeCycleStateTerminating = "TERMINATING" + RunLifeCycleStateTerminated = "TERMINATED" + RunLifeCycleStateSkipped = "SKIPPED" + RunLifeCycleStateInternalError = "INTERNAL_ERROR" +) diff --git a/aws/models/RunParameters.go b/aws/models/RunParameters.go index c3e5de4..4097e63 100644 --- a/aws/models/RunParameters.go +++ b/aws/models/RunParameters.go @@ -1,8 +1,8 @@ -package models - -type RunParameters struct { - JarParams []string `json:"jar_params,omitempty" url:"jar_params,omitempty"` - NotebookParams map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"` - PythonParams []string `json:"python_params,omitempty" url:"python_params,omitempty"` - SparkSubmitParams []string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"` -} +package models + +type RunParameters struct { + JarParams []string `json:"jar_params,omitempty" url:"jar_params,omitempty"` + NotebookParams map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"` + PythonParams []string `json:"python_params,omitempty" url:"python_params,omitempty"` + 
SparkSubmitParams []string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"` +} diff --git a/aws/models/RunResultState.go b/aws/models/RunResultState.go index a03fe59..8b433ba 100644 --- a/aws/models/RunResultState.go +++ b/aws/models/RunResultState.go @@ -1,10 +1,10 @@ -package models - -type RunResultState string - -const ( - RunResultStateSuccess = "SUCCESS" - RunResultStateFailed = "FAILED" - RunResultStateTimedout = "TIMEDOUT" - RunResultStateCanceled = "CANCELED" -) +package models + +type RunResultState string + +const ( + RunResultStateSuccess = "SUCCESS" + RunResultStateFailed = "FAILED" + RunResultStateTimedout = "TIMEDOUT" + RunResultStateCanceled = "CANCELED" +) diff --git a/aws/models/RunState.go b/aws/models/RunState.go index 9c663bf..725c84f 100644 --- a/aws/models/RunState.go +++ b/aws/models/RunState.go @@ -1,7 +1,7 @@ -package models - -type RunState struct { - LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"` - ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` -} +package models + +type RunState struct { + LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"` + ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` +} diff --git a/aws/models/S3StorageInfo.go b/aws/models/S3StorageInfo.go index ed06276..2acbe94 100644 --- a/aws/models/S3StorageInfo.go +++ b/aws/models/S3StorageInfo.go @@ -1,11 +1,11 @@ -package models - -type S3StorageInfo struct { - Destination string `json:"destination,omitempty" url:"destination,omitempty"` - Region string `json:"region,omitempty" url:"region,omitempty"` - Endpoint string `json:"endpoint,omitempty" url:"endpoint,omitempty"` - EnableEncryption bool `json:"enable_encryption,omitempty" url:"enable_encryption,omitempty"` - EncryptionType string `json:"encryption_type,omitempty" url:"encryption_type,omitempty"` - KmsKey string `json:"kms_key,omitempty" url:"kms_key,omitempty"` - CannedACL string `json:"canned_acl,omitempty" url:"canned_acl,omitempty"` -} +package models + +type S3StorageInfo struct { + Destination string `json:"destination,omitempty" url:"destination,omitempty"` + Region string `json:"region,omitempty" url:"region,omitempty"` + Endpoint string `json:"endpoint,omitempty" url:"endpoint,omitempty"` + EnableEncryption bool `json:"enable_encryption,omitempty" url:"enable_encryption,omitempty"` + EncryptionType string `json:"encryption_type,omitempty" url:"encryption_type,omitempty"` + KmsKey string `json:"kms_key,omitempty" url:"kms_key,omitempty"` + CannedACL string `json:"canned_acl,omitempty" url:"canned_acl,omitempty"` +} diff --git a/aws/models/ScopeBackendType.go b/aws/models/ScopeBackendType.go index e93ebaf..2efccaf 100644 --- a/aws/models/ScopeBackendType.go +++ b/aws/models/ScopeBackendType.go @@ -1,7 +1,7 @@ -package models - -type ScopeBackendType string - -const ( - ScopeBackendTypeDatabricks = "DATABRICKS" -) +package models + +type ScopeBackendType string + +const ( + ScopeBackendTypeDatabricks = "DATABRICKS" +) diff --git a/aws/models/SecretMetadata.go b/aws/models/SecretMetadata.go index b1f8a58..4503bfa 100644 --- a/aws/models/SecretMetadata.go +++ b/aws/models/SecretMetadata.go @@ -1,6 +1,6 @@ -package models - -type SecretMetadata struct { - Key 
string `json:"key,omitempty" url:"key,omitempty"` - LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty" url:"last_updated_timestamp,omitempty"` -} +package models + +type SecretMetadata struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty" url:"last_updated_timestamp,omitempty"` +} diff --git a/aws/models/SecretScope.go b/aws/models/SecretScope.go index c83c4e1..380f54c 100644 --- a/aws/models/SecretScope.go +++ b/aws/models/SecretScope.go @@ -1,6 +1,6 @@ -package models - -type SecretScope struct { - Name string `json:"name,omitempty" url:"name,omitempty"` - BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"` -} +package models + +type SecretScope struct { + Name string `json:"name,omitempty" url:"name,omitempty"` + BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"` +} diff --git a/aws/models/SparkConfPair.go b/aws/models/SparkConfPair.go index 40f96c4..71f81c5 100644 --- a/aws/models/SparkConfPair.go +++ b/aws/models/SparkConfPair.go @@ -1,6 +1,6 @@ -package models - -type SparkConfPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type SparkConfPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/aws/models/SparkEnvPair.go b/aws/models/SparkEnvPair.go index aa9b3dd..6b70c7f 100644 --- a/aws/models/SparkEnvPair.go +++ b/aws/models/SparkEnvPair.go @@ -1,6 +1,6 @@ -package models - -type SparkEnvPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type SparkEnvPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/aws/models/SparkJarTask.go b/aws/models/SparkJarTask.go index c51437d..3ace25a 100644 --- a/aws/models/SparkJarTask.go +++ b/aws/models/SparkJarTask.go @@ -1,7 +1,7 @@ -package models - -type SparkJarTask struct { - JarURI string `json:"jar_uri,omitempty" url:"jar_uri,omitempty"` - MainClassName string `json:"main_class_name,omitempty" url:"main_class_name,omitempty"` - Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkJarTask struct { + JarURI string `json:"jar_uri,omitempty" url:"jar_uri,omitempty"` + MainClassName string `json:"main_class_name,omitempty" url:"main_class_name,omitempty"` + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/aws/models/SparkNode.go b/aws/models/SparkNode.go index 17348f1..609621d 100644 --- a/aws/models/SparkNode.go +++ b/aws/models/SparkNode.go @@ -1,11 +1,11 @@ -package models - -type SparkNode struct { - PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"` - PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"` - NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"` - InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"` - StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"` - NodeAwsAttributes *SparkNodeAwsAttributes `json:"node_aws_attributes,omitempty" url:"node_aws_attributes,omitempty"` - HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"` 
-} +package models + +type SparkNode struct { + PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"` + PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"` + NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"` + InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"` + StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"` + NodeAwsAttributes *SparkNodeAwsAttributes `json:"node_aws_attributes,omitempty" url:"node_aws_attributes,omitempty"` + HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"` +} diff --git a/aws/models/SparkNodeAwsAttributes.go b/aws/models/SparkNodeAwsAttributes.go index 7449834..5d913e3 100644 --- a/aws/models/SparkNodeAwsAttributes.go +++ b/aws/models/SparkNodeAwsAttributes.go @@ -1,5 +1,5 @@ -package models - -type SparkNodeAwsAttributes struct { - IsSpot bool `json:"is_spot,omitempty" url:"is_spot,omitempty"` -} +package models + +type SparkNodeAwsAttributes struct { + IsSpot bool `json:"is_spot,omitempty" url:"is_spot,omitempty"` +} diff --git a/aws/models/SparkPythonTask.go b/aws/models/SparkPythonTask.go index af5ebf5..7a0443d 100644 --- a/aws/models/SparkPythonTask.go +++ b/aws/models/SparkPythonTask.go @@ -1,6 +1,6 @@ -package models - -type SparkPythonTask struct { - PythonFile string `json:"python_file,omitempty" url:"python_file,omitempty"` - Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkPythonTask struct { + PythonFile string `json:"python_file,omitempty" url:"python_file,omitempty"` + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/aws/models/SparkSubmitTask.go b/aws/models/SparkSubmitTask.go index a492b93..4f3d657 100644 --- a/aws/models/SparkSubmitTask.go +++ b/aws/models/SparkSubmitTask.go @@ -1,5 +1,5 @@ -package models - -type SparkSubmitTask struct { - Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkSubmitTask struct { + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/aws/models/SparkVersion.go b/aws/models/SparkVersion.go index 9b22893..a514033 100644 --- a/aws/models/SparkVersion.go +++ b/aws/models/SparkVersion.go @@ -1,6 +1,6 @@ -package models - -type SparkVersion struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Name string `json:"name,omitempty" url:"name,omitempty"` -} +package models + +type SparkVersion struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` +} diff --git a/aws/models/TerminationCode.go b/aws/models/TerminationCode.go index ea47709..3f0a227 100644 --- a/aws/models/TerminationCode.go +++ b/aws/models/TerminationCode.go @@ -1,20 +1,20 @@ -package models - -type TerminationCode string - -const ( - TerminationCodeUserRequest = "USER_REQUEST" - TerminationCodeJobFinished = "JOB_FINISHED" - TerminationCodeInactivity = "INACTIVITY" - TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN" - TerminationCodeCommunicationLost = "COMMUNICATION_LOST" - TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE" - TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE" - TerminationCodeInvalidArgument = "INVALID_ARGUMENT" - TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE" - TerminationCodeInternalError = "INTERNAL_ERROR" - 
TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE" - TerminationCodeRequestRejected = "REQUEST_REJECTED" - TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE" - TerminationCodeTrialExpired = "TRIAL_EXPIRED" -) +package models + +type TerminationCode string + +const ( + TerminationCodeUserRequest = "USER_REQUEST" + TerminationCodeJobFinished = "JOB_FINISHED" + TerminationCodeInactivity = "INACTIVITY" + TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN" + TerminationCodeCommunicationLost = "COMMUNICATION_LOST" + TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE" + TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE" + TerminationCodeInvalidArgument = "INVALID_ARGUMENT" + TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE" + TerminationCodeInternalError = "INTERNAL_ERROR" + TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE" + TerminationCodeRequestRejected = "REQUEST_REJECTED" + TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE" + TerminationCodeTrialExpired = "TRIAL_EXPIRED" +) diff --git a/aws/models/TerminationParameter.go b/aws/models/TerminationParameter.go index cdd8863..f6167ce 100644 --- a/aws/models/TerminationParameter.go +++ b/aws/models/TerminationParameter.go @@ -1,17 +1,17 @@ -package models - -type TerminationParameter string - -const ( - TerminationParameterUsername = "username" - TerminationParameterAwsAPIErrorCode = "aws_api_error_code" - TerminationParameterAwsInstanceStateReason = "aws_instance_state_reason" - TerminationParameterAwsSpotRequestStatus = "aws_spot_request_status" - TerminationParameterAwsSpotRequestFaultCode = "aws_spot_request_fault_code" - TerminationParameterAwsImpairedStatusDetails = "aws_impaired_status_details" - TerminationParameterAwsInstanceStatusEvent = "aws_instance_status_event" - TerminationParameterAwsErrorMessage = "aws_error_message" - TerminationParameterDatabricksErrorMessage = "databricks_error_message" - TerminationParameterInactivityDurationMin = "inactivity_duration_min" - TerminationParameterInstanceID = "instance_id" -) +package models + +type TerminationParameter string + +const ( + TerminationParameterUsername = "username" + TerminationParameterAwsAPIErrorCode = "aws_api_error_code" + TerminationParameterAwsInstanceStateReason = "aws_instance_state_reason" + TerminationParameterAwsSpotRequestStatus = "aws_spot_request_status" + TerminationParameterAwsSpotRequestFaultCode = "aws_spot_request_fault_code" + TerminationParameterAwsImpairedStatusDetails = "aws_impaired_status_details" + TerminationParameterAwsInstanceStatusEvent = "aws_instance_status_event" + TerminationParameterAwsErrorMessage = "aws_error_message" + TerminationParameterDatabricksErrorMessage = "databricks_error_message" + TerminationParameterInactivityDurationMin = "inactivity_duration_min" + TerminationParameterInstanceID = "instance_id" +) diff --git a/aws/models/TerminationReason.go b/aws/models/TerminationReason.go index ce88778..307401f 100644 --- a/aws/models/TerminationReason.go +++ b/aws/models/TerminationReason.go @@ -1,6 +1,6 @@ -package models - -type TerminationReason struct { - Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"` - Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type TerminationReason struct { + Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"` + Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git 
a/aws/models/TriggerType.go b/aws/models/TriggerType.go index dad32c9..c8d93b8 100644 --- a/aws/models/TriggerType.go +++ b/aws/models/TriggerType.go @@ -1,9 +1,9 @@ -package models - -type TriggerType string - -const ( - TriggerTypePeriodic = "PERIODIC" - TriggerTypeOneTime = "ONE_TIME" - TriggerTypeRetry = "RETRY" -) +package models + +type TriggerType string + +const ( + TriggerTypePeriodic = "PERIODIC" + TriggerTypeOneTime = "ONE_TIME" + TriggerTypeRetry = "RETRY" +) diff --git a/aws/models/ViewItem.go b/aws/models/ViewItem.go index 58ae2a4..7296d0e 100644 --- a/aws/models/ViewItem.go +++ b/aws/models/ViewItem.go @@ -1,7 +1,7 @@ -package models - -type ViewItem struct { - Content string `json:"content,omitempty" url:"content,omitempty"` - Name string `json:"name,omitempty" url:"name,omitempty"` - Type *ViewType `json:"type,omitempty" url:"type,omitempty"` -} +package models + +type ViewItem struct { + Content string `json:"content,omitempty" url:"content,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` + Type *ViewType `json:"type,omitempty" url:"type,omitempty"` +} diff --git a/aws/models/ViewType.go b/aws/models/ViewType.go index c3e0473..b936383 100644 --- a/aws/models/ViewType.go +++ b/aws/models/ViewType.go @@ -1,8 +1,8 @@ -package models - -type ViewType string - -const ( - ViewTypeNotebook = "NOTEBOOK" - ViewTypeDashboard = "DASHBOARD" -) +package models + +type ViewType string + +const ( + ViewTypeNotebook = "NOTEBOOK" + ViewTypeDashboard = "DASHBOARD" +) diff --git a/aws/models/ViewsToExport.go b/aws/models/ViewsToExport.go index 53c504d..7ed0578 100644 --- a/aws/models/ViewsToExport.go +++ b/aws/models/ViewsToExport.go @@ -1,9 +1,9 @@ -package models - -type ViewsToExport string - -const ( - ViewsToExportCode = "CODE" - ViewsToExportDashboards = "DASHBOARDS" - ViewsToExportAll = "ALL" -) +package models + +type ViewsToExport string + +const ( + ViewsToExportCode = "CODE" + ViewsToExportDashboards = "DASHBOARDS" + ViewsToExportAll = "ALL" +) diff --git a/aws/models/deepcopy_generated.go b/aws/models/deepcopy_generated.go index 63759b3..558a53d 100644 --- a/aws/models/deepcopy_generated.go +++ b/aws/models/deepcopy_generated.go @@ -1,1481 +1,1481 @@ -// +build !ignore_autogenerated - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package models - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AclItem) DeepCopyInto(out *AclItem) { - *out = *in - if in.Permission != nil { - in, out := &in.Permission, &out.Permission - *out = new(AclPermission) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AclItem. -func (in *AclItem) DeepCopy() *AclItem { - if in == nil { - return nil - } - out := new(AclItem) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AutoScale) DeepCopyInto(out *AutoScale) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoScale. -func (in *AutoScale) DeepCopy() *AutoScale { - if in == nil { - return nil - } - out := new(AutoScale) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
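(The hunks above re-add the string-backed enum types — TerminationCode, TerminationParameter, TriggerType, ViewType, ViewsToExport — byte-for-byte; the hunk below does the same for the deepcopy-gen output. For orientation, here is a minimal sketch of how those enums combine with the generated DeepCopy helpers; it assumes only the types and constants visible in this diff, plus the module path used elsewhere in this repository:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)

func main() {
	// TerminationCode is a string-backed enum; its constants are untyped
	// string constants, so they assign and convert cleanly.
	code := models.TerminationCode(models.TerminationCodeUserRequest)

	reason := models.TerminationReason{Code: &code}

	// DeepCopy (from deepcopy_generated.go) clones the pointed-to value,
	// so mutating the copy leaves the original untouched.
	cp := reason.DeepCopy()
	*cp.Code = models.TerminationCodeJobFinished

	fmt.Println(*reason.Code, *cp.Code) // USER_REQUEST JOB_FINISHED
}

End of sketch; the diff resumes below.)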
-func (in *AwsAttributes) DeepCopyInto(out *AwsAttributes) { - *out = *in - if in.Availability != nil { - in, out := &in.Availability, &out.Availability - *out = new(AwsAvailability) - **out = **in - } - if in.EbsVolumeType != nil { - in, out := &in.EbsVolumeType, &out.EbsVolumeType - *out = new(EbsVolumeType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsAttributes. -func (in *AwsAttributes) DeepCopy() *AwsAttributes { - if in == nil { - return nil - } - out := new(AwsAttributes) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterAttributes) DeepCopyInto(out *ClusterAttributes) { - *out = *in - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = new(SparkConfPair) - **out = **in - } - if in.AwsAttributes != nil { - in, out := &in.AwsAttributes, &out.AwsAttributes - *out = new(AwsAttributes) - (*in).DeepCopyInto(*out) - } - if in.SSHPublicKeys != nil { - in, out := &in.SSHPublicKeys, &out.SSHPublicKeys - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ClusterSource != nil { - in, out := &in.ClusterSource, &out.ClusterSource - *out = new(AwsAvailability) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterAttributes. -func (in *ClusterAttributes) DeepCopy() *ClusterAttributes { - if in == nil { - return nil - } - out := new(ClusterAttributes) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterCloudProviderNodeInfo) DeepCopyInto(out *ClusterCloudProviderNodeInfo) { - *out = *in - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(ClusterCloudProviderNodeStatus) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterCloudProviderNodeInfo. -func (in *ClusterCloudProviderNodeInfo) DeepCopy() *ClusterCloudProviderNodeInfo { - if in == nil { - return nil - } - out := new(ClusterCloudProviderNodeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterEvent) DeepCopyInto(out *ClusterEvent) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(ClusterEventType) - **out = **in - } - if in.Details != nil { - in, out := &in.Details, &out.Details - *out = new(AwsAttributes) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterEvent. 
-func (in *ClusterEvent) DeepCopy() *ClusterEvent { - if in == nil { - return nil - } - out := new(ClusterEvent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterInfo) DeepCopyInto(out *ClusterInfo) { - *out = *in - if in.AutoScale != nil { - in, out := &in.AutoScale, &out.AutoScale - *out = new(AutoScale) - **out = **in - } - if in.Driver != nil { - in, out := &in.Driver, &out.Driver - *out = new(SparkNode) - (*in).DeepCopyInto(*out) - } - if in.Executors != nil { - in, out := &in.Executors, &out.Executors - *out = make([]SparkNode, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = new(SparkConfPair) - **out = **in - } - if in.AwsAttributes != nil { - in, out := &in.AwsAttributes, &out.AwsAttributes - *out = new(AwsAttributes) - (*in).DeepCopyInto(*out) - } - if in.SSHPublicKeys != nil { - in, out := &in.SSHPublicKeys, &out.SSHPublicKeys - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ClusterSource != nil { - in, out := &in.ClusterSource, &out.ClusterSource - *out = new(AwsAvailability) - **out = **in - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(ClusterState) - **out = **in - } - if in.DefaultTags != nil { - in, out := &in.DefaultTags, &out.DefaultTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogStatus != nil { - in, out := &in.ClusterLogStatus, &out.ClusterLogStatus - *out = new(LogSyncStatus) - **out = **in - } - if in.TerminationReason != nil { - in, out := &in.TerminationReason, &out.TerminationReason - *out = new(S3StorageInfo) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInfo. -func (in *ClusterInfo) DeepCopy() *ClusterInfo { - if in == nil { - return nil - } - out := new(ClusterInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterInstance) DeepCopyInto(out *ClusterInstance) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInstance. -func (in *ClusterInstance) DeepCopy() *ClusterInstance { - if in == nil { - return nil - } - out := new(ClusterInstance) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ClusterLibraryStatuses) DeepCopyInto(out *ClusterLibraryStatuses) { - *out = *in - if in.LibraryStatuses != nil { - in, out := &in.LibraryStatuses, &out.LibraryStatuses - *out = make([]LibraryFullStatus, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLibraryStatuses. -func (in *ClusterLibraryStatuses) DeepCopy() *ClusterLibraryStatuses { - if in == nil { - return nil - } - out := new(ClusterLibraryStatuses) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterLogConf) DeepCopyInto(out *ClusterLogConf) { - *out = *in - if in.Dbfs != nil { - in, out := &in.Dbfs, &out.Dbfs - *out = new(DbfsStorageInfo) - **out = **in - } - if in.S3 != nil { - in, out := &in.S3, &out.S3 - *out = new(S3StorageInfo) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLogConf. -func (in *ClusterLogConf) DeepCopy() *ClusterLogConf { - if in == nil { - return nil - } - out := new(ClusterLogConf) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterSize) DeepCopyInto(out *ClusterSize) { - *out = *in - if in.Autoscale != nil { - in, out := &in.Autoscale, &out.Autoscale - *out = new(AutoScale) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSize. -func (in *ClusterSize) DeepCopy() *ClusterSize { - if in == nil { - return nil - } - out := new(ClusterSize) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec) { - *out = *in - if in.NewCluster != nil { - in, out := &in.NewCluster, &out.NewCluster - *out = new(NewCluster) - (*in).DeepCopyInto(*out) - } - if in.Libraries != nil { - in, out := &in.Libraries, &out.Libraries - *out = make([]Library, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec. -func (in *ClusterSpec) DeepCopy() *ClusterSpec { - if in == nil { - return nil - } - out := new(ClusterSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterTag) DeepCopyInto(out *ClusterTag) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterTag. -func (in *ClusterTag) DeepCopy() *ClusterTag { - if in == nil { - return nil - } - out := new(ClusterTag) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CronSchedule) DeepCopyInto(out *CronSchedule) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CronSchedule. 
-func (in *CronSchedule) DeepCopy() *CronSchedule { - if in == nil { - return nil - } - out := new(CronSchedule) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsStorageInfo) DeepCopyInto(out *DbfsStorageInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsStorageInfo. -func (in *DbfsStorageInfo) DeepCopy() *DbfsStorageInfo { - if in == nil { - return nil - } - out := new(DbfsStorageInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DiskSpec) DeepCopyInto(out *DiskSpec) { - *out = *in - if in.DiskType != nil { - in, out := &in.DiskType, &out.DiskType - *out = new(DiskType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskSpec. -func (in *DiskSpec) DeepCopy() *DiskSpec { - if in == nil { - return nil - } - out := new(DiskSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DiskType) DeepCopyInto(out *DiskType) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskType. -func (in *DiskType) DeepCopy() *DiskType { - if in == nil { - return nil - } - out := new(DiskType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventDetails) DeepCopyInto(out *EventDetails) { - *out = *in - if in.PreviousAttributes != nil { - in, out := &in.PreviousAttributes, &out.PreviousAttributes - *out = new(ClusterAttributes) - (*in).DeepCopyInto(*out) - } - if in.Attributes != nil { - in, out := &in.Attributes, &out.Attributes - *out = new(ClusterAttributes) - (*in).DeepCopyInto(*out) - } - if in.PreviousClusterSize != nil { - in, out := &in.PreviousClusterSize, &out.PreviousClusterSize - *out = new(ClusterSize) - (*in).DeepCopyInto(*out) - } - if in.ClusterSize != nil { - in, out := &in.ClusterSize, &out.ClusterSize - *out = new(ClusterSize) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDetails. -func (in *EventDetails) DeepCopy() *EventDetails { - if in == nil { - return nil - } - out := new(EventDetails) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *FileInfo) DeepCopyInto(out *FileInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileInfo. -func (in *FileInfo) DeepCopy() *FileInfo { - if in == nil { - return nil - } - out := new(FileInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *InitScriptInfo) DeepCopyInto(out *InitScriptInfo) { - *out = *in - if in.Dbfs != nil { - in, out := &in.Dbfs, &out.Dbfs - *out = new(DbfsStorageInfo) - **out = **in - } - if in.S3 != nil { - in, out := &in.S3, &out.S3 - *out = new(S3StorageInfo) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitScriptInfo. -func (in *InitScriptInfo) DeepCopy() *InitScriptInfo { - if in == nil { - return nil - } - out := new(InitScriptInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstancePoolAndStats) DeepCopyInto(out *InstancePoolAndStats) { - *out = *in - out.AwsAttributes = in.AwsAttributes - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - in.DiskSpec.DeepCopyInto(&out.DiskSpec) - if in.PreloadedSparkVersions != nil { - in, out := &in.PreloadedSparkVersions, &out.PreloadedSparkVersions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DefaultTags != nil { - in, out := &in.DefaultTags, &out.DefaultTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - out.Stats = in.Stats - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolAndStats. -func (in *InstancePoolAndStats) DeepCopy() *InstancePoolAndStats { - if in == nil { - return nil - } - out := new(InstancePoolAndStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstancePoolAwsAttributes) DeepCopyInto(out *InstancePoolAwsAttributes) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolAwsAttributes. -func (in *InstancePoolAwsAttributes) DeepCopy() *InstancePoolAwsAttributes { - if in == nil { - return nil - } - out := new(InstancePoolAwsAttributes) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstancePoolStats) DeepCopyInto(out *InstancePoolStats) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolStats. -func (in *InstancePoolStats) DeepCopy() *InstancePoolStats { - if in == nil { - return nil - } - out := new(InstancePoolStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstanceProfile) DeepCopyInto(out *InstanceProfile) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceProfile. -func (in *InstanceProfile) DeepCopy() *InstanceProfile { - if in == nil { - return nil - } - out := new(InstanceProfile) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Job) DeepCopyInto(out *Job) { - *out = *in - if in.Settings != nil { - in, out := &in.Settings, &out.Settings - *out = new(JobSettings) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job. 
-func (in *Job) DeepCopy() *Job { - if in == nil { - return nil - } - out := new(Job) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobEmailNotifications) DeepCopyInto(out *JobEmailNotifications) { - *out = *in - if in.OnStart != nil { - in, out := &in.OnStart, &out.OnStart - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.OnSuccess != nil { - in, out := &in.OnSuccess, &out.OnSuccess - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.OnFailure != nil { - in, out := &in.OnFailure, &out.OnFailure - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobEmailNotifications. -func (in *JobEmailNotifications) DeepCopy() *JobEmailNotifications { - if in == nil { - return nil - } - out := new(JobEmailNotifications) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobSettings) DeepCopyInto(out *JobSettings) { - *out = *in - if in.NewCluster != nil { - in, out := &in.NewCluster, &out.NewCluster - *out = new(NewCluster) - (*in).DeepCopyInto(*out) - } - if in.NotebookTask != nil { - in, out := &in.NotebookTask, &out.NotebookTask - *out = new(NotebookTask) - (*in).DeepCopyInto(*out) - } - if in.SparkJarTask != nil { - in, out := &in.SparkJarTask, &out.SparkJarTask - *out = new(SparkJarTask) - (*in).DeepCopyInto(*out) - } - if in.SparkPythonTask != nil { - in, out := &in.SparkPythonTask, &out.SparkPythonTask - *out = new(SparkPythonTask) - (*in).DeepCopyInto(*out) - } - if in.SparkSubmitTask != nil { - in, out := &in.SparkSubmitTask, &out.SparkSubmitTask - *out = new(SparkSubmitTask) - (*in).DeepCopyInto(*out) - } - if in.Libraries != nil { - in, out := &in.Libraries, &out.Libraries - *out = make([]Library, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EmailNotifications != nil { - in, out := &in.EmailNotifications, &out.EmailNotifications - *out = new(JobEmailNotifications) - (*in).DeepCopyInto(*out) - } - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(CronSchedule) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSettings. -func (in *JobSettings) DeepCopy() *JobSettings { - if in == nil { - return nil - } - out := new(JobSettings) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobTask) DeepCopyInto(out *JobTask) { - *out = *in - if in.NotebookTask != nil { - in, out := &in.NotebookTask, &out.NotebookTask - *out = new(NotebookTask) - (*in).DeepCopyInto(*out) - } - if in.SparkJarTask != nil { - in, out := &in.SparkJarTask, &out.SparkJarTask - *out = new(SparkJarTask) - (*in).DeepCopyInto(*out) - } - if in.SparkPythonTask != nil { - in, out := &in.SparkPythonTask, &out.SparkPythonTask - *out = new(SparkPythonTask) - (*in).DeepCopyInto(*out) - } - if in.SparkSubmitTask != nil { - in, out := &in.SparkSubmitTask, &out.SparkSubmitTask - *out = new(SparkSubmitTask) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobTask. 
-func (in *JobTask) DeepCopy() *JobTask { - if in == nil { - return nil - } - out := new(JobTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Library) DeepCopyInto(out *Library) { - *out = *in - if in.Pypi != nil { - in, out := &in.Pypi, &out.Pypi - *out = new(PythonPyPiLibrary) - **out = **in - } - if in.Maven != nil { - in, out := &in.Maven, &out.Maven - *out = new(MavenLibrary) - (*in).DeepCopyInto(*out) - } - if in.Cran != nil { - in, out := &in.Cran, &out.Cran - *out = new(RCranLibrary) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Library. -func (in *Library) DeepCopy() *Library { - if in == nil { - return nil - } - out := new(Library) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibraryFullStatus) DeepCopyInto(out *LibraryFullStatus) { - *out = *in - if in.Library != nil { - in, out := &in.Library, &out.Library - *out = new(Library) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(LibraryInstallStatus) - **out = **in - } - if in.Messages != nil { - in, out := &in.Messages, &out.Messages - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibraryFullStatus. -func (in *LibraryFullStatus) DeepCopy() *LibraryFullStatus { - if in == nil { - return nil - } - out := new(LibraryFullStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogSyncStatus) DeepCopyInto(out *LogSyncStatus) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogSyncStatus. -func (in *LogSyncStatus) DeepCopy() *LogSyncStatus { - if in == nil { - return nil - } - out := new(LogSyncStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MavenLibrary) DeepCopyInto(out *MavenLibrary) { - *out = *in - if in.Exclusions != nil { - in, out := &in.Exclusions, &out.Exclusions - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MavenLibrary. -func (in *MavenLibrary) DeepCopy() *MavenLibrary { - if in == nil { - return nil - } - out := new(MavenLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *NewCluster) DeepCopyInto(out *NewCluster) { - *out = *in - if in.Autoscale != nil { - in, out := &in.Autoscale, &out.Autoscale - *out = new(AutoScale) - **out = **in - } - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = new(SparkConfPair) - **out = **in - } - if in.AwsAttributes != nil { - in, out := &in.AwsAttributes, &out.AwsAttributes - *out = new(AwsAttributes) - (*in).DeepCopyInto(*out) - } - if in.SSHPublicKeys != nil { - in, out := &in.SSHPublicKeys, &out.SSHPublicKeys - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NewCluster. -func (in *NewCluster) DeepCopy() *NewCluster { - if in == nil { - return nil - } - out := new(NewCluster) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NodeType) DeepCopyInto(out *NodeType) { - *out = *in - if in.NodeInfo != nil { - in, out := &in.NodeInfo, &out.NodeInfo - *out = new(ClusterCloudProviderNodeInfo) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeType. -func (in *NodeType) DeepCopy() *NodeType { - if in == nil { - return nil - } - out := new(NodeType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NotebookOutput) DeepCopyInto(out *NotebookOutput) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookOutput. -func (in *NotebookOutput) DeepCopy() *NotebookOutput { - if in == nil { - return nil - } - out := new(NotebookOutput) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NotebookTask) DeepCopyInto(out *NotebookTask) { - *out = *in - if in.BaseParameters != nil { - in, out := &in.BaseParameters, &out.BaseParameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookTask. -func (in *NotebookTask) DeepCopy() *NotebookTask { - if in == nil { - return nil - } - out := new(NotebookTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
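(NewCluster is where the difference between a plain struct assignment and the generated DeepCopy is most visible: assignment shares the SparkEnvVars map, while DeepCopy reallocates it, as the hunk above shows. A minimal sketch, using only fields whose types are confirmed in this diff:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)

func main() {
	nc := models.NewCluster{
		SparkEnvVars: map[string]string{"SPARK_LOCAL_DIRS": "/tmp"},
	}

	shallow := nc         // copies the map header only; both share storage
	deep := nc.DeepCopy() // generated copy allocates a fresh map

	shallow.SparkEnvVars["SPARK_LOCAL_DIRS"] = "/mnt"

	fmt.Println(nc.SparkEnvVars["SPARK_LOCAL_DIRS"])   // "/mnt" — aliased
	fmt.Println(deep.SparkEnvVars["SPARK_LOCAL_DIRS"]) // "/tmp" — independent
}

End of sketch; the diff resumes below.)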
-func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) { - *out = *in - if in.ObjectType != nil { - in, out := &in.ObjectType, &out.ObjectType - *out = new(ObjectType) - **out = **in - } - if in.Language != nil { - in, out := &in.Language, &out.Language - *out = new(Language) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ObjectInfo. -func (in *ObjectInfo) DeepCopy() *ObjectInfo { - if in == nil { - return nil - } - out := new(ObjectInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParamPair) DeepCopyInto(out *ParamPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParamPair. -func (in *ParamPair) DeepCopy() *ParamPair { - if in == nil { - return nil - } - out := new(ParamPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParameterPair) DeepCopyInto(out *ParameterPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParameterPair. -func (in *ParameterPair) DeepCopy() *ParameterPair { - if in == nil { - return nil - } - out := new(ParameterPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrincipalName) DeepCopyInto(out *PrincipalName) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrincipalName. -func (in *PrincipalName) DeepCopy() *PrincipalName { - if in == nil { - return nil - } - out := new(PrincipalName) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PublicTokenInfo) DeepCopyInto(out *PublicTokenInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublicTokenInfo. -func (in *PublicTokenInfo) DeepCopy() *PublicTokenInfo { - if in == nil { - return nil - } - out := new(PublicTokenInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PythonPyPiLibrary) DeepCopyInto(out *PythonPyPiLibrary) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PythonPyPiLibrary. -func (in *PythonPyPiLibrary) DeepCopy() *PythonPyPiLibrary { - if in == nil { - return nil - } - out := new(PythonPyPiLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RCranLibrary) DeepCopyInto(out *RCranLibrary) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RCranLibrary. -func (in *RCranLibrary) DeepCopy() *RCranLibrary { - if in == nil { - return nil - } - out := new(RCranLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Run) DeepCopyInto(out *Run) { - *out = *in - if in.State != nil { - in, out := &in.State, &out.State - *out = new(RunState) - (*in).DeepCopyInto(*out) - } - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(CronSchedule) - **out = **in - } - if in.Task != nil { - in, out := &in.Task, &out.Task - *out = new(JobTask) - (*in).DeepCopyInto(*out) - } - if in.ClusterSpec != nil { - in, out := &in.ClusterSpec, &out.ClusterSpec - *out = new(ClusterSpec) - (*in).DeepCopyInto(*out) - } - if in.ClusterInstance != nil { - in, out := &in.ClusterInstance, &out.ClusterInstance - *out = new(ClusterInstance) - **out = **in - } - if in.OverridingParameters != nil { - in, out := &in.OverridingParameters, &out.OverridingParameters - *out = new(RunParameters) - (*in).DeepCopyInto(*out) - } - if in.Trigger != nil { - in, out := &in.Trigger, &out.Trigger - *out = new(TriggerType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Run. -func (in *Run) DeepCopy() *Run { - if in == nil { - return nil - } - out := new(Run) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RunParameters) DeepCopyInto(out *RunParameters) { - *out = *in - if in.JarParams != nil { - in, out := &in.JarParams, &out.JarParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.NotebookParams != nil { - in, out := &in.NotebookParams, &out.NotebookParams - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.PythonParams != nil { - in, out := &in.PythonParams, &out.PythonParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SparkSubmitParams != nil { - in, out := &in.SparkSubmitParams, &out.SparkSubmitParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunParameters. -func (in *RunParameters) DeepCopy() *RunParameters { - if in == nil { - return nil - } - out := new(RunParameters) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RunState) DeepCopyInto(out *RunState) { - *out = *in - if in.LifeCycleState != nil { - in, out := &in.LifeCycleState, &out.LifeCycleState - *out = new(RunLifeCycleState) - **out = **in - } - if in.ResultState != nil { - in, out := &in.ResultState, &out.ResultState - *out = new(RunResultState) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunState. -func (in *RunState) DeepCopy() *RunState { - if in == nil { - return nil - } - out := new(RunState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *S3StorageInfo) DeepCopyInto(out *S3StorageInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new S3StorageInfo. -func (in *S3StorageInfo) DeepCopy() *S3StorageInfo { - if in == nil { - return nil - } - out := new(S3StorageInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *SecretMetadata) DeepCopyInto(out *SecretMetadata) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretMetadata. -func (in *SecretMetadata) DeepCopy() *SecretMetadata { - if in == nil { - return nil - } - out := new(SecretMetadata) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SecretScope) DeepCopyInto(out *SecretScope) { - *out = *in - if in.BackendType != nil { - in, out := &in.BackendType, &out.BackendType - *out = new(ScopeBackendType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretScope. -func (in *SecretScope) DeepCopy() *SecretScope { - if in == nil { - return nil - } - out := new(SecretScope) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkConfPair) DeepCopyInto(out *SparkConfPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfPair. -func (in *SparkConfPair) DeepCopy() *SparkConfPair { - if in == nil { - return nil - } - out := new(SparkConfPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkEnvPair) DeepCopyInto(out *SparkEnvPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkEnvPair. -func (in *SparkEnvPair) DeepCopy() *SparkEnvPair { - if in == nil { - return nil - } - out := new(SparkEnvPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkJarTask) DeepCopyInto(out *SparkJarTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJarTask. -func (in *SparkJarTask) DeepCopy() *SparkJarTask { - if in == nil { - return nil - } - out := new(SparkJarTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkNode) DeepCopyInto(out *SparkNode) { - *out = *in - if in.NodeAwsAttributes != nil { - in, out := &in.NodeAwsAttributes, &out.NodeAwsAttributes - *out = new(SparkNodeAwsAttributes) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkNode. -func (in *SparkNode) DeepCopy() *SparkNode { - if in == nil { - return nil - } - out := new(SparkNode) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkNodeAwsAttributes) DeepCopyInto(out *SparkNodeAwsAttributes) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkNodeAwsAttributes. 
-func (in *SparkNodeAwsAttributes) DeepCopy() *SparkNodeAwsAttributes { - if in == nil { - return nil - } - out := new(SparkNodeAwsAttributes) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkPythonTask) DeepCopyInto(out *SparkPythonTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkPythonTask. -func (in *SparkPythonTask) DeepCopy() *SparkPythonTask { - if in == nil { - return nil - } - out := new(SparkPythonTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkSubmitTask) DeepCopyInto(out *SparkSubmitTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSubmitTask. -func (in *SparkSubmitTask) DeepCopy() *SparkSubmitTask { - if in == nil { - return nil - } - out := new(SparkSubmitTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkVersion) DeepCopyInto(out *SparkVersion) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkVersion. -func (in *SparkVersion) DeepCopy() *SparkVersion { - if in == nil { - return nil - } - out := new(SparkVersion) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TerminationReason) DeepCopyInto(out *TerminationReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(TerminationCode) - **out = **in - } - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]ParameterPair, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminationReason. -func (in *TerminationReason) DeepCopy() *TerminationReason { - if in == nil { - return nil - } - out := new(TerminationReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ViewItem) DeepCopyInto(out *ViewItem) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(ViewType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewItem. -func (in *ViewItem) DeepCopy() *ViewItem { - if in == nil { - return nil - } - out := new(ViewItem) - in.DeepCopyInto(out) - return out -} +// +build !ignore_autogenerated + +// Code generated by deepcopy-gen. DO NOT EDIT. + +package models + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *AclItem) DeepCopyInto(out *AclItem) { + *out = *in + if in.Permission != nil { + in, out := &in.Permission, &out.Permission + *out = new(AclPermission) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AclItem. +func (in *AclItem) DeepCopy() *AclItem { + if in == nil { + return nil + } + out := new(AclItem) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AutoScale) DeepCopyInto(out *AutoScale) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoScale. +func (in *AutoScale) DeepCopy() *AutoScale { + if in == nil { + return nil + } + out := new(AutoScale) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *AwsAttributes) DeepCopyInto(out *AwsAttributes) { + *out = *in + if in.Availability != nil { + in, out := &in.Availability, &out.Availability + *out = new(AwsAvailability) + **out = **in + } + if in.EbsVolumeType != nil { + in, out := &in.EbsVolumeType, &out.EbsVolumeType + *out = new(EbsVolumeType) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsAttributes. +func (in *AwsAttributes) DeepCopy() *AwsAttributes { + if in == nil { + return nil + } + out := new(AwsAttributes) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterAttributes) DeepCopyInto(out *ClusterAttributes) { + *out = *in + if in.SparkConf != nil { + in, out := &in.SparkConf, &out.SparkConf + *out = new(SparkConfPair) + **out = **in + } + if in.AwsAttributes != nil { + in, out := &in.AwsAttributes, &out.AwsAttributes + *out = new(AwsAttributes) + (*in).DeepCopyInto(*out) + } + if in.SSHPublicKeys != nil { + in, out := &in.SSHPublicKeys, &out.SSHPublicKeys + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.CustomTags != nil { + in, out := &in.CustomTags, &out.CustomTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + if in.ClusterLogConf != nil { + in, out := &in.ClusterLogConf, &out.ClusterLogConf + *out = new(ClusterLogConf) + (*in).DeepCopyInto(*out) + } + if in.InitScripts != nil { + in, out := &in.InitScripts, &out.InitScripts + *out = make([]InitScriptInfo, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.SparkEnvVars != nil { + in, out := &in.SparkEnvVars, &out.SparkEnvVars + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.ClusterSource != nil { + in, out := &in.ClusterSource, &out.ClusterSource + *out = new(AwsAvailability) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterAttributes. +func (in *ClusterAttributes) DeepCopy() *ClusterAttributes { + if in == nil { + return nil + } + out := new(ClusterAttributes) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ClusterCloudProviderNodeInfo) DeepCopyInto(out *ClusterCloudProviderNodeInfo) { + *out = *in + if in.Status != nil { + in, out := &in.Status, &out.Status + *out = new(ClusterCloudProviderNodeStatus) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterCloudProviderNodeInfo. +func (in *ClusterCloudProviderNodeInfo) DeepCopy() *ClusterCloudProviderNodeInfo { + if in == nil { + return nil + } + out := new(ClusterCloudProviderNodeInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterEvent) DeepCopyInto(out *ClusterEvent) { + *out = *in + if in.Type != nil { + in, out := &in.Type, &out.Type + *out = new(ClusterEventType) + **out = **in + } + if in.Details != nil { + in, out := &in.Details, &out.Details + *out = new(AwsAttributes) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterEvent. +func (in *ClusterEvent) DeepCopy() *ClusterEvent { + if in == nil { + return nil + } + out := new(ClusterEvent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterInfo) DeepCopyInto(out *ClusterInfo) { + *out = *in + if in.AutoScale != nil { + in, out := &in.AutoScale, &out.AutoScale + *out = new(AutoScale) + **out = **in + } + if in.Driver != nil { + in, out := &in.Driver, &out.Driver + *out = new(SparkNode) + (*in).DeepCopyInto(*out) + } + if in.Executors != nil { + in, out := &in.Executors, &out.Executors + *out = make([]SparkNode, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.SparkConf != nil { + in, out := &in.SparkConf, &out.SparkConf + *out = new(SparkConfPair) + **out = **in + } + if in.AwsAttributes != nil { + in, out := &in.AwsAttributes, &out.AwsAttributes + *out = new(AwsAttributes) + (*in).DeepCopyInto(*out) + } + if in.SSHPublicKeys != nil { + in, out := &in.SSHPublicKeys, &out.SSHPublicKeys + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.CustomTags != nil { + in, out := &in.CustomTags, &out.CustomTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + if in.ClusterLogConf != nil { + in, out := &in.ClusterLogConf, &out.ClusterLogConf + *out = new(ClusterLogConf) + (*in).DeepCopyInto(*out) + } + if in.InitScripts != nil { + in, out := &in.InitScripts, &out.InitScripts + *out = make([]InitScriptInfo, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.SparkEnvVars != nil { + in, out := &in.SparkEnvVars, &out.SparkEnvVars + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.ClusterSource != nil { + in, out := &in.ClusterSource, &out.ClusterSource + *out = new(AwsAvailability) + **out = **in + } + if in.State != nil { + in, out := &in.State, &out.State + *out = new(ClusterState) + **out = **in + } + if in.DefaultTags != nil { + in, out := &in.DefaultTags, &out.DefaultTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + if in.ClusterLogStatus != nil { + in, out := &in.ClusterLogStatus, &out.ClusterLogStatus + *out = new(LogSyncStatus) + **out = **in + } + if in.TerminationReason != nil { + in, out := &in.TerminationReason, &out.TerminationReason + *out = new(S3StorageInfo) + **out = **in + } + return +} + +// 
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInfo. +func (in *ClusterInfo) DeepCopy() *ClusterInfo { + if in == nil { + return nil + } + out := new(ClusterInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterInstance) DeepCopyInto(out *ClusterInstance) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInstance. +func (in *ClusterInstance) DeepCopy() *ClusterInstance { + if in == nil { + return nil + } + out := new(ClusterInstance) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterLibraryStatuses) DeepCopyInto(out *ClusterLibraryStatuses) { + *out = *in + if in.LibraryStatuses != nil { + in, out := &in.LibraryStatuses, &out.LibraryStatuses + *out = make([]LibraryFullStatus, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLibraryStatuses. +func (in *ClusterLibraryStatuses) DeepCopy() *ClusterLibraryStatuses { + if in == nil { + return nil + } + out := new(ClusterLibraryStatuses) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterLogConf) DeepCopyInto(out *ClusterLogConf) { + *out = *in + if in.Dbfs != nil { + in, out := &in.Dbfs, &out.Dbfs + *out = new(DbfsStorageInfo) + **out = **in + } + if in.S3 != nil { + in, out := &in.S3, &out.S3 + *out = new(S3StorageInfo) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLogConf. +func (in *ClusterLogConf) DeepCopy() *ClusterLogConf { + if in == nil { + return nil + } + out := new(ClusterLogConf) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterSize) DeepCopyInto(out *ClusterSize) { + *out = *in + if in.Autoscale != nil { + in, out := &in.Autoscale, &out.Autoscale + *out = new(AutoScale) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSize. +func (in *ClusterSize) DeepCopy() *ClusterSize { + if in == nil { + return nil + } + out := new(ClusterSize) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec) { + *out = *in + if in.NewCluster != nil { + in, out := &in.NewCluster, &out.NewCluster + *out = new(NewCluster) + (*in).DeepCopyInto(*out) + } + if in.Libraries != nil { + in, out := &in.Libraries, &out.Libraries + *out = make([]Library, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec. +func (in *ClusterSpec) DeepCopy() *ClusterSpec { + if in == nil { + return nil + } + out := new(ClusterSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ClusterTag) DeepCopyInto(out *ClusterTag) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterTag. +func (in *ClusterTag) DeepCopy() *ClusterTag { + if in == nil { + return nil + } + out := new(ClusterTag) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CronSchedule) DeepCopyInto(out *CronSchedule) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CronSchedule. +func (in *CronSchedule) DeepCopy() *CronSchedule { + if in == nil { + return nil + } + out := new(CronSchedule) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DbfsStorageInfo) DeepCopyInto(out *DbfsStorageInfo) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsStorageInfo. +func (in *DbfsStorageInfo) DeepCopy() *DbfsStorageInfo { + if in == nil { + return nil + } + out := new(DbfsStorageInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DiskSpec) DeepCopyInto(out *DiskSpec) { + *out = *in + if in.DiskType != nil { + in, out := &in.DiskType, &out.DiskType + *out = new(DiskType) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskSpec. +func (in *DiskSpec) DeepCopy() *DiskSpec { + if in == nil { + return nil + } + out := new(DiskSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DiskType) DeepCopyInto(out *DiskType) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskType. +func (in *DiskType) DeepCopy() *DiskType { + if in == nil { + return nil + } + out := new(DiskType) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EventDetails) DeepCopyInto(out *EventDetails) { + *out = *in + if in.PreviousAttributes != nil { + in, out := &in.PreviousAttributes, &out.PreviousAttributes + *out = new(ClusterAttributes) + (*in).DeepCopyInto(*out) + } + if in.Attributes != nil { + in, out := &in.Attributes, &out.Attributes + *out = new(ClusterAttributes) + (*in).DeepCopyInto(*out) + } + if in.PreviousClusterSize != nil { + in, out := &in.PreviousClusterSize, &out.PreviousClusterSize + *out = new(ClusterSize) + (*in).DeepCopyInto(*out) + } + if in.ClusterSize != nil { + in, out := &in.ClusterSize, &out.ClusterSize + *out = new(ClusterSize) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDetails. +func (in *EventDetails) DeepCopy() *EventDetails { + if in == nil { + return nil + } + out := new(EventDetails) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *FileInfo) DeepCopyInto(out *FileInfo) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileInfo. +func (in *FileInfo) DeepCopy() *FileInfo { + if in == nil { + return nil + } + out := new(FileInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InitScriptInfo) DeepCopyInto(out *InitScriptInfo) { + *out = *in + if in.Dbfs != nil { + in, out := &in.Dbfs, &out.Dbfs + *out = new(DbfsStorageInfo) + **out = **in + } + if in.S3 != nil { + in, out := &in.S3, &out.S3 + *out = new(S3StorageInfo) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitScriptInfo. +func (in *InitScriptInfo) DeepCopy() *InitScriptInfo { + if in == nil { + return nil + } + out := new(InitScriptInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InstancePoolAndStats) DeepCopyInto(out *InstancePoolAndStats) { + *out = *in + out.AwsAttributes = in.AwsAttributes + if in.CustomTags != nil { + in, out := &in.CustomTags, &out.CustomTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + in.DiskSpec.DeepCopyInto(&out.DiskSpec) + if in.PreloadedSparkVersions != nil { + in, out := &in.PreloadedSparkVersions, &out.PreloadedSparkVersions + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.DefaultTags != nil { + in, out := &in.DefaultTags, &out.DefaultTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + out.Stats = in.Stats + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolAndStats. +func (in *InstancePoolAndStats) DeepCopy() *InstancePoolAndStats { + if in == nil { + return nil + } + out := new(InstancePoolAndStats) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InstancePoolAwsAttributes) DeepCopyInto(out *InstancePoolAwsAttributes) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolAwsAttributes. +func (in *InstancePoolAwsAttributes) DeepCopy() *InstancePoolAwsAttributes { + if in == nil { + return nil + } + out := new(InstancePoolAwsAttributes) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InstancePoolStats) DeepCopyInto(out *InstancePoolStats) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolStats. +func (in *InstancePoolStats) DeepCopy() *InstancePoolStats { + if in == nil { + return nil + } + out := new(InstancePoolStats) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *InstanceProfile) DeepCopyInto(out *InstanceProfile) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceProfile. 
+func (in *InstanceProfile) DeepCopy() *InstanceProfile { + if in == nil { + return nil + } + out := new(InstanceProfile) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Job) DeepCopyInto(out *Job) { + *out = *in + if in.Settings != nil { + in, out := &in.Settings, &out.Settings + *out = new(JobSettings) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job. +func (in *Job) DeepCopy() *Job { + if in == nil { + return nil + } + out := new(Job) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *JobEmailNotifications) DeepCopyInto(out *JobEmailNotifications) { + *out = *in + if in.OnStart != nil { + in, out := &in.OnStart, &out.OnStart + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.OnSuccess != nil { + in, out := &in.OnSuccess, &out.OnSuccess + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.OnFailure != nil { + in, out := &in.OnFailure, &out.OnFailure + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobEmailNotifications. +func (in *JobEmailNotifications) DeepCopy() *JobEmailNotifications { + if in == nil { + return nil + } + out := new(JobEmailNotifications) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *JobSettings) DeepCopyInto(out *JobSettings) { + *out = *in + if in.NewCluster != nil { + in, out := &in.NewCluster, &out.NewCluster + *out = new(NewCluster) + (*in).DeepCopyInto(*out) + } + if in.NotebookTask != nil { + in, out := &in.NotebookTask, &out.NotebookTask + *out = new(NotebookTask) + (*in).DeepCopyInto(*out) + } + if in.SparkJarTask != nil { + in, out := &in.SparkJarTask, &out.SparkJarTask + *out = new(SparkJarTask) + (*in).DeepCopyInto(*out) + } + if in.SparkPythonTask != nil { + in, out := &in.SparkPythonTask, &out.SparkPythonTask + *out = new(SparkPythonTask) + (*in).DeepCopyInto(*out) + } + if in.SparkSubmitTask != nil { + in, out := &in.SparkSubmitTask, &out.SparkSubmitTask + *out = new(SparkSubmitTask) + (*in).DeepCopyInto(*out) + } + if in.Libraries != nil { + in, out := &in.Libraries, &out.Libraries + *out = make([]Library, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.EmailNotifications != nil { + in, out := &in.EmailNotifications, &out.EmailNotifications + *out = new(JobEmailNotifications) + (*in).DeepCopyInto(*out) + } + if in.Schedule != nil { + in, out := &in.Schedule, &out.Schedule + *out = new(CronSchedule) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSettings. +func (in *JobSettings) DeepCopy() *JobSettings { + if in == nil { + return nil + } + out := new(JobSettings) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *JobTask) DeepCopyInto(out *JobTask) { + *out = *in + if in.NotebookTask != nil { + in, out := &in.NotebookTask, &out.NotebookTask + *out = new(NotebookTask) + (*in).DeepCopyInto(*out) + } + if in.SparkJarTask != nil { + in, out := &in.SparkJarTask, &out.SparkJarTask + *out = new(SparkJarTask) + (*in).DeepCopyInto(*out) + } + if in.SparkPythonTask != nil { + in, out := &in.SparkPythonTask, &out.SparkPythonTask + *out = new(SparkPythonTask) + (*in).DeepCopyInto(*out) + } + if in.SparkSubmitTask != nil { + in, out := &in.SparkSubmitTask, &out.SparkSubmitTask + *out = new(SparkSubmitTask) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobTask. +func (in *JobTask) DeepCopy() *JobTask { + if in == nil { + return nil + } + out := new(JobTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Library) DeepCopyInto(out *Library) { + *out = *in + if in.Pypi != nil { + in, out := &in.Pypi, &out.Pypi + *out = new(PythonPyPiLibrary) + **out = **in + } + if in.Maven != nil { + in, out := &in.Maven, &out.Maven + *out = new(MavenLibrary) + (*in).DeepCopyInto(*out) + } + if in.Cran != nil { + in, out := &in.Cran, &out.Cran + *out = new(RCranLibrary) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Library. +func (in *Library) DeepCopy() *Library { + if in == nil { + return nil + } + out := new(Library) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LibraryFullStatus) DeepCopyInto(out *LibraryFullStatus) { + *out = *in + if in.Library != nil { + in, out := &in.Library, &out.Library + *out = new(Library) + (*in).DeepCopyInto(*out) + } + if in.Status != nil { + in, out := &in.Status, &out.Status + *out = new(LibraryInstallStatus) + **out = **in + } + if in.Messages != nil { + in, out := &in.Messages, &out.Messages + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibraryFullStatus. +func (in *LibraryFullStatus) DeepCopy() *LibraryFullStatus { + if in == nil { + return nil + } + out := new(LibraryFullStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *LogSyncStatus) DeepCopyInto(out *LogSyncStatus) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogSyncStatus. +func (in *LogSyncStatus) DeepCopy() *LogSyncStatus { + if in == nil { + return nil + } + out := new(LogSyncStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MavenLibrary) DeepCopyInto(out *MavenLibrary) { + *out = *in + if in.Exclusions != nil { + in, out := &in.Exclusions, &out.Exclusions + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MavenLibrary. 
+func (in *MavenLibrary) DeepCopy() *MavenLibrary { + if in == nil { + return nil + } + out := new(MavenLibrary) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *NewCluster) DeepCopyInto(out *NewCluster) { + *out = *in + if in.Autoscale != nil { + in, out := &in.Autoscale, &out.Autoscale + *out = new(AutoScale) + **out = **in + } + if in.SparkConf != nil { + in, out := &in.SparkConf, &out.SparkConf + *out = new(SparkConfPair) + **out = **in + } + if in.AwsAttributes != nil { + in, out := &in.AwsAttributes, &out.AwsAttributes + *out = new(AwsAttributes) + (*in).DeepCopyInto(*out) + } + if in.SSHPublicKeys != nil { + in, out := &in.SSHPublicKeys, &out.SSHPublicKeys + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.CustomTags != nil { + in, out := &in.CustomTags, &out.CustomTags + *out = make([]ClusterTag, len(*in)) + copy(*out, *in) + } + if in.ClusterLogConf != nil { + in, out := &in.ClusterLogConf, &out.ClusterLogConf + *out = new(ClusterLogConf) + (*in).DeepCopyInto(*out) + } + if in.InitScripts != nil { + in, out := &in.InitScripts, &out.InitScripts + *out = make([]InitScriptInfo, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.SparkEnvVars != nil { + in, out := &in.SparkEnvVars, &out.SparkEnvVars + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NewCluster. +func (in *NewCluster) DeepCopy() *NewCluster { + if in == nil { + return nil + } + out := new(NewCluster) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *NodeType) DeepCopyInto(out *NodeType) { + *out = *in + if in.NodeInfo != nil { + in, out := &in.NodeInfo, &out.NodeInfo + *out = new(ClusterCloudProviderNodeInfo) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeType. +func (in *NodeType) DeepCopy() *NodeType { + if in == nil { + return nil + } + out := new(NodeType) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *NotebookOutput) DeepCopyInto(out *NotebookOutput) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookOutput. +func (in *NotebookOutput) DeepCopy() *NotebookOutput { + if in == nil { + return nil + } + out := new(NotebookOutput) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *NotebookTask) DeepCopyInto(out *NotebookTask) { + *out = *in + if in.BaseParameters != nil { + in, out := &in.BaseParameters, &out.BaseParameters + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookTask. +func (in *NotebookTask) DeepCopy() *NotebookTask { + if in == nil { + return nil + } + out := new(NotebookTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. 
in must be non-nil. +func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) { + *out = *in + if in.ObjectType != nil { + in, out := &in.ObjectType, &out.ObjectType + *out = new(ObjectType) + **out = **in + } + if in.Language != nil { + in, out := &in.Language, &out.Language + *out = new(Language) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ObjectInfo. +func (in *ObjectInfo) DeepCopy() *ObjectInfo { + if in == nil { + return nil + } + out := new(ObjectInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ParamPair) DeepCopyInto(out *ParamPair) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParamPair. +func (in *ParamPair) DeepCopy() *ParamPair { + if in == nil { + return nil + } + out := new(ParamPair) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ParameterPair) DeepCopyInto(out *ParameterPair) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParameterPair. +func (in *ParameterPair) DeepCopy() *ParameterPair { + if in == nil { + return nil + } + out := new(ParameterPair) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PrincipalName) DeepCopyInto(out *PrincipalName) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrincipalName. +func (in *PrincipalName) DeepCopy() *PrincipalName { + if in == nil { + return nil + } + out := new(PrincipalName) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PublicTokenInfo) DeepCopyInto(out *PublicTokenInfo) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublicTokenInfo. +func (in *PublicTokenInfo) DeepCopy() *PublicTokenInfo { + if in == nil { + return nil + } + out := new(PublicTokenInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PythonPyPiLibrary) DeepCopyInto(out *PythonPyPiLibrary) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PythonPyPiLibrary. +func (in *PythonPyPiLibrary) DeepCopy() *PythonPyPiLibrary { + if in == nil { + return nil + } + out := new(PythonPyPiLibrary) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *RCranLibrary) DeepCopyInto(out *RCranLibrary) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RCranLibrary. +func (in *RCranLibrary) DeepCopy() *RCranLibrary { + if in == nil { + return nil + } + out := new(RCranLibrary) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Run) DeepCopyInto(out *Run) { + *out = *in + if in.State != nil { + in, out := &in.State, &out.State + *out = new(RunState) + (*in).DeepCopyInto(*out) + } + if in.Schedule != nil { + in, out := &in.Schedule, &out.Schedule + *out = new(CronSchedule) + **out = **in + } + if in.Task != nil { + in, out := &in.Task, &out.Task + *out = new(JobTask) + (*in).DeepCopyInto(*out) + } + if in.ClusterSpec != nil { + in, out := &in.ClusterSpec, &out.ClusterSpec + *out = new(ClusterSpec) + (*in).DeepCopyInto(*out) + } + if in.ClusterInstance != nil { + in, out := &in.ClusterInstance, &out.ClusterInstance + *out = new(ClusterInstance) + **out = **in + } + if in.OverridingParameters != nil { + in, out := &in.OverridingParameters, &out.OverridingParameters + *out = new(RunParameters) + (*in).DeepCopyInto(*out) + } + if in.Trigger != nil { + in, out := &in.Trigger, &out.Trigger + *out = new(TriggerType) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Run. +func (in *Run) DeepCopy() *Run { + if in == nil { + return nil + } + out := new(Run) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *RunParameters) DeepCopyInto(out *RunParameters) { + *out = *in + if in.JarParams != nil { + in, out := &in.JarParams, &out.JarParams + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.NotebookParams != nil { + in, out := &in.NotebookParams, &out.NotebookParams + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.PythonParams != nil { + in, out := &in.PythonParams, &out.PythonParams + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.SparkSubmitParams != nil { + in, out := &in.SparkSubmitParams, &out.SparkSubmitParams + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunParameters. +func (in *RunParameters) DeepCopy() *RunParameters { + if in == nil { + return nil + } + out := new(RunParameters) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *RunState) DeepCopyInto(out *RunState) { + *out = *in + if in.LifeCycleState != nil { + in, out := &in.LifeCycleState, &out.LifeCycleState + *out = new(RunLifeCycleState) + **out = **in + } + if in.ResultState != nil { + in, out := &in.ResultState, &out.ResultState + *out = new(RunResultState) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunState. +func (in *RunState) DeepCopy() *RunState { + if in == nil { + return nil + } + out := new(RunState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *S3StorageInfo) DeepCopyInto(out *S3StorageInfo) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new S3StorageInfo. +func (in *S3StorageInfo) DeepCopy() *S3StorageInfo { + if in == nil { + return nil + } + out := new(S3StorageInfo) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *SecretMetadata) DeepCopyInto(out *SecretMetadata) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretMetadata. +func (in *SecretMetadata) DeepCopy() *SecretMetadata { + if in == nil { + return nil + } + out := new(SecretMetadata) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretScope) DeepCopyInto(out *SecretScope) { + *out = *in + if in.BackendType != nil { + in, out := &in.BackendType, &out.BackendType + *out = new(ScopeBackendType) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretScope. +func (in *SecretScope) DeepCopy() *SecretScope { + if in == nil { + return nil + } + out := new(SecretScope) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkConfPair) DeepCopyInto(out *SparkConfPair) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfPair. +func (in *SparkConfPair) DeepCopy() *SparkConfPair { + if in == nil { + return nil + } + out := new(SparkConfPair) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkEnvPair) DeepCopyInto(out *SparkEnvPair) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkEnvPair. +func (in *SparkEnvPair) DeepCopy() *SparkEnvPair { + if in == nil { + return nil + } + out := new(SparkEnvPair) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkJarTask) DeepCopyInto(out *SparkJarTask) { + *out = *in + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJarTask. +func (in *SparkJarTask) DeepCopy() *SparkJarTask { + if in == nil { + return nil + } + out := new(SparkJarTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkNode) DeepCopyInto(out *SparkNode) { + *out = *in + if in.NodeAwsAttributes != nil { + in, out := &in.NodeAwsAttributes, &out.NodeAwsAttributes + *out = new(SparkNodeAwsAttributes) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkNode. +func (in *SparkNode) DeepCopy() *SparkNode { + if in == nil { + return nil + } + out := new(SparkNode) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkNodeAwsAttributes) DeepCopyInto(out *SparkNodeAwsAttributes) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkNodeAwsAttributes. 
+func (in *SparkNodeAwsAttributes) DeepCopy() *SparkNodeAwsAttributes { + if in == nil { + return nil + } + out := new(SparkNodeAwsAttributes) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkPythonTask) DeepCopyInto(out *SparkPythonTask) { + *out = *in + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkPythonTask. +func (in *SparkPythonTask) DeepCopy() *SparkPythonTask { + if in == nil { + return nil + } + out := new(SparkPythonTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkSubmitTask) DeepCopyInto(out *SparkSubmitTask) { + *out = *in + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSubmitTask. +func (in *SparkSubmitTask) DeepCopy() *SparkSubmitTask { + if in == nil { + return nil + } + out := new(SparkSubmitTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SparkVersion) DeepCopyInto(out *SparkVersion) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkVersion. +func (in *SparkVersion) DeepCopy() *SparkVersion { + if in == nil { + return nil + } + out := new(SparkVersion) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TerminationReason) DeepCopyInto(out *TerminationReason) { + *out = *in + if in.Code != nil { + in, out := &in.Code, &out.Code + *out = new(TerminationCode) + **out = **in + } + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make([]ParameterPair, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminationReason. +func (in *TerminationReason) DeepCopy() *TerminationReason { + if in == nil { + return nil + } + out := new(TerminationReason) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ViewItem) DeepCopyInto(out *ViewItem) { + *out = *in + if in.Type != nil { + in, out := &in.Type, &out.Type + *out = new(ViewType) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewItem. 
+func (in *ViewItem) DeepCopy() *ViewItem { + if in == nil { + return nil + } + out := new(ViewItem) + in.DeepCopyInto(out) + return out +} diff --git a/aws/models/doc.go b/aws/models/doc.go index 2073e9a..aa00136 100644 --- a/aws/models/doc.go +++ b/aws/models/doc.go @@ -1,2 +1,2 @@ -// +k8s:deepcopy-gen=package -package models +// +k8s:deepcopy-gen=package +package models diff --git a/aws/scim.go b/aws/scim.go index 4b9398e..4ddd067 100644 --- a/aws/scim.go +++ b/aws/scim.go @@ -1,11 +1,11 @@ -package aws - -// ScimAPI exposes the SCIM API -type ScimAPI struct { - Client DBClient -} - -func (a ScimAPI) init(client DBClient) ScimAPI { - a.Client = client - return a -} +package aws + +// ScimAPI exposes the SCIM API +type ScimAPI struct { + Client DBClient +} + +func (a ScimAPI) init(client DBClient) ScimAPI { + a.Client = client + return a +} diff --git a/aws/secrets.go b/aws/secrets.go index 88bdbbf..8501e91 100644 --- a/aws/secrets.go +++ b/aws/secrets.go @@ -1,190 +1,190 @@ -package aws - -import ( - "encoding/base64" - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// SecretsAPI exposes the Secrets API -type SecretsAPI struct { - Client DBClient -} - -func (a SecretsAPI) init(client DBClient) SecretsAPI { - a.Client = client - return a -} - -// CreateSecretScope creates a new secret scope -func (a SecretsAPI) CreateSecretScope(scope, initialManagePrincipal string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - InitialManagePrincipal string `json:"initial_manage_principal,omitempty" url:"initial_manage_principal,omitempty"` - }{ - scope, - initialManagePrincipal, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/create", data, nil) - return err -} - -// DeleteSecretScope deletes a secret scope -func (a SecretsAPI) DeleteSecretScope(scope string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - }{ - scope, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/delete", data, nil) - return err -} - -// ListSecretScopes lists all secret scopes available in the workspace -func (a SecretsAPI) ListSecretScopes() ([]models.SecretScope, error) { - var listSecretScopesResponse struct { - Scopes []models.SecretScope `json:"scopes,omitempty" url:"scopes,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/scopes/list", nil, nil) - if err != nil { - return listSecretScopesResponse.Scopes, err - } - - err = json.Unmarshal(resp, &listSecretScopesResponse) - return listSecretScopesResponse.Scopes, err -} - -// PutSecret creates or modifies a bytes secret depends on the type of scope backend with -func (a SecretsAPI) PutSecret(bytesValue []byte, scope, key string) error { - data := struct { - BytesValue string `json:"bytes_value,omitempty" url:"bytes_value,omitempty"` - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string `json:"key,omitempty" url:"key,omitempty"` - }{ - base64.StdEncoding.EncodeToString(bytesValue), - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) - return err -} - -// PutSecretString creates or modifies a string secret depends on the type of scope backend -func (a SecretsAPI) PutSecretString(stringValue, scope, key string) error { - data := struct { - StringValue string `json:"string_value,omitempty" url:"string_value,omitempty"` - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string 
`json:"key,omitempty" url:"key,omitempty"` - }{ - stringValue, - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) - return err -} - -// DeleteSecret deletes a secret depends on the type of scope backend -func (a SecretsAPI) DeleteSecret(scope, key string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string `json:"key,omitempty" url:"key,omitempty"` - }{ - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/delete", data, nil) - return err -} - -// ListSecrets lists the secret keys that are stored at this scope -func (a SecretsAPI) ListSecrets(scope string) ([]models.SecretMetadata, error) { - var secretsList struct { - Secrets []models.SecretMetadata `json:"secrets,omitempty" url:"secrets,omitempty"` - } - - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - }{ - scope, - } - - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/list", data, nil) - if err != nil { - return secretsList.Secrets, err - } - - err = json.Unmarshal(resp, &secretsList) - return secretsList.Secrets, err -} - -// PutSecretACL creates or overwrites the ACL associated with the given principal (user or group) on the specified scope point -func (a SecretsAPI) PutSecretACL(scope, principal string, permission models.AclPermission) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Principal string `json:"principal,omitempty" url:"principal,omitempty"` - Permission models.AclPermission `json:"permission,omitempty" url:"permission,omitempty"` - }{ - scope, - principal, - permission, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/put", data, nil) - return err -} - -// DeleteSecretACL deletes the given ACL on the given scope -func (a SecretsAPI) DeleteSecretACL(scope, principal string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Principal string `json:"principal,omitempty" url:"principal,omitempty"` - }{ - scope, - principal, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/delete", data, nil) - return err -} - -// GetSecretACL describe the details about the given ACL, such as the group and permission -func (a SecretsAPI) GetSecretACL(scope, principal string) (models.AclItem, error) { - var aclItem models.AclItem - - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Principal string `json:"principal,omitempty" url:"principal,omitempty"` - }{ - scope, - principal, - } - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/get", data, nil) - if err != nil { - return aclItem, err - } - - err = json.Unmarshal(resp, &aclItem) - return aclItem, err -} - -// ListSecretACLs lists the ACLs set on the given scope -func (a SecretsAPI) ListSecretACLs(scope string) ([]models.AclItem, error) { - var aclItem struct { - Acls []models.AclItem `json:"acls,omitempty" url:"acls,omitempty"` - } - - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - }{ - scope, - } - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/list", data, nil) - if err != nil { - return aclItem.Acls, err - } - - err = json.Unmarshal(resp, &aclItem) - return aclItem.Acls, err -} +package aws + +import ( + "encoding/base64" + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// SecretsAPI exposes the Secrets API +type SecretsAPI struct { + Client DBClient 
+} + +func (a SecretsAPI) init(client DBClient) SecretsAPI { + a.Client = client + return a +} + +// CreateSecretScope creates a new secret scope +func (a SecretsAPI) CreateSecretScope(scope, initialManagePrincipal string) error { + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + InitialManagePrincipal string `json:"initial_manage_principal,omitempty" url:"initial_manage_principal,omitempty"` + }{ + scope, + initialManagePrincipal, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/create", data, nil) + return err +} + +// DeleteSecretScope deletes a secret scope +func (a SecretsAPI) DeleteSecretScope(scope string) error { + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + }{ + scope, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/delete", data, nil) + return err +} + +// ListSecretScopes lists all secret scopes available in the workspace +func (a SecretsAPI) ListSecretScopes() ([]models.SecretScope, error) { + var listSecretScopesResponse struct { + Scopes []models.SecretScope `json:"scopes,omitempty" url:"scopes,omitempty"` + } + + resp, err := a.Client.performQuery(http.MethodGet, "/secrets/scopes/list", nil, nil) + if err != nil { + return listSecretScopesResponse.Scopes, err + } + + err = json.Unmarshal(resp, &listSecretScopesResponse) + return listSecretScopesResponse.Scopes, err +} + +// PutSecret creates or modifies a bytes secret, depending on the type of scope backend +func (a SecretsAPI) PutSecret(bytesValue []byte, scope, key string) error { + data := struct { + BytesValue string `json:"bytes_value,omitempty" url:"bytes_value,omitempty"` + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Key string `json:"key,omitempty" url:"key,omitempty"` + }{ + base64.StdEncoding.EncodeToString(bytesValue), + scope, + key, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) + return err +} + +// PutSecretString creates or modifies a string secret, depending on the type of scope backend +func (a SecretsAPI) PutSecretString(stringValue, scope, key string) error { + data := struct { + StringValue string `json:"string_value,omitempty" url:"string_value,omitempty"` + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Key string `json:"key,omitempty" url:"key,omitempty"` + }{ + stringValue, + scope, + key, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) + return err +} + +// DeleteSecret deletes a secret, depending on the type of scope backend +func (a SecretsAPI) DeleteSecret(scope, key string) error { + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Key string `json:"key,omitempty" url:"key,omitempty"` + }{ + scope, + key, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/delete", data, nil) + return err +} + +// ListSecrets lists the secret keys that are stored at this scope +func (a SecretsAPI) ListSecrets(scope string) ([]models.SecretMetadata, error) { + var secretsList struct { + Secrets []models.SecretMetadata `json:"secrets,omitempty" url:"secrets,omitempty"` + } + + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + }{ + scope, + } + + resp, err := a.Client.performQuery(http.MethodGet, "/secrets/list", data, nil) + if err != nil { + return secretsList.Secrets, err + } + + err = json.Unmarshal(resp, &secretsList) + return secretsList.Secrets, err +} + +// PutSecretACL creates or overwrites the ACL associated with the
given principal (user or group) on the specified scope +func (a SecretsAPI) PutSecretACL(scope, principal string, permission models.AclPermission) error { + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Principal string `json:"principal,omitempty" url:"principal,omitempty"` + Permission models.AclPermission `json:"permission,omitempty" url:"permission,omitempty"` + }{ + scope, + principal, + permission, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/put", data, nil) + return err +} + +// DeleteSecretACL deletes the given ACL on the given scope +func (a SecretsAPI) DeleteSecretACL(scope, principal string) error { + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Principal string `json:"principal,omitempty" url:"principal,omitempty"` + }{ + scope, + principal, + } + _, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/delete", data, nil) + return err +} + +// GetSecretACL describes the details about the given ACL, such as the group and permission +func (a SecretsAPI) GetSecretACL(scope, principal string) (models.AclItem, error) { + var aclItem models.AclItem + + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + Principal string `json:"principal,omitempty" url:"principal,omitempty"` + }{ + scope, + principal, + } + resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/get", data, nil) + if err != nil { + return aclItem, err + } + + err = json.Unmarshal(resp, &aclItem) + return aclItem, err +} + +// ListSecretACLs lists the ACLs set on the given scope +func (a SecretsAPI) ListSecretACLs(scope string) ([]models.AclItem, error) { + var aclItem struct { + Acls []models.AclItem `json:"acls,omitempty" url:"acls,omitempty"` + } + + data := struct { + Scope string `json:"scope,omitempty" url:"scope,omitempty"` + }{ + scope, + } + resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/list", data, nil) + if err != nil { + return aclItem.Acls, err + } + + err = json.Unmarshal(resp, &aclItem) + return aclItem.Acls, err +}
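Aside, not part of the patch: a minimal sketch of driving the Secrets API above. It assumes the AWS package exposes a DBClient whose Init mirrors the Azure client later in this diff, that DBClientOption carries the workspace connection details (only its User field is visible in this diff), and that models.SecretMetadata has a Key field; treat all of these as assumptions rather than facts established by the patch.

    package main

    import (
        "fmt"
        "log"

        databricks "github.com/xinsnake/databricks-sdk-golang"
        "github.com/xinsnake/databricks-sdk-golang/aws"
    )

    func main() {
        // Hypothetical setup: real code must fill in the workspace
        // connection fields, which this diff does not show.
        var client aws.DBClient
        client = client.Init(databricks.DBClientOption{ /* host, token, ... */ })

        secrets := client.Secrets()

        // Create a scope, store a string secret, then list the stored keys.
        if err := secrets.CreateSecretScope("demo-scope", "users"); err != nil {
            log.Fatal(err)
        }
        if err := secrets.PutSecretString("s3cr3t", "demo-scope", "api-key"); err != nil {
            log.Fatal(err)
        }
        keys, err := secrets.ListSecrets("demo-scope")
        if err != nil {
            log.Fatal(err)
        }
        for _, k := range keys {
            fmt.Println(k.Key) // Key field assumed on models.SecretMetadata
        }
    }

diff --git a/aws/token.go b/aws/token.go index dbdc224..d71d5bf 100644 --- a/aws/token.go +++ b/aws/token.go @@ -1,70 +1,70 @@ -package aws - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// TokenAPI exposes the Token API -type TokenAPI struct { - Client DBClient -} - -func (a TokenAPI) init(client DBClient) TokenAPI { - a.Client = client - return a -} - -// TokenCreateResponse is the response from Create -type TokenCreateResponse struct { - TokenValue string `json:"token_value,omitempty" url:"token_value,omitempty"` - TokenInfo models.PublicTokenInfo `json:"token_info,omitempty" url:"token_info,omitempty"` -} - -// Create creates and return a token -func (a SecretsAPI) Create(lifetimeSeconds int64, comment string) (TokenCreateResponse, error) { - var createResponse TokenCreateResponse - - data := struct { - LifetimeSeconds int64 `json:"lifetime_seconds,omitempty" url:"lifetime_seconds,omitempty"` - Comment string `json:"comment,omitempty" url:"comment,omitempty"` - }{ - lifetimeSeconds, - comment, - } - resp, err := a.Client.performQuery(http.MethodPost, "/token/create", data, nil) - if err != nil { - return createResponse, err - } - - err = json.Unmarshal(resp, &createResponse) - return createResponse, err -} - -// List lists all the valid tokens for a user-workspace pair -func (a SecretsAPI) List() ([]models.PublicTokenInfo, error) { - var publicTokenInfo struct { -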
TokenInfos []models.PublicTokenInfo `json:"token_infos,omitempty" url:"token_infos,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/token/list", nil, nil) - if err != nil { - return publicTokenInfo.TokenInfos, err - } - - err = json.Unmarshal(resp, &publicTokenInfo) - return publicTokenInfo.TokenInfos, err -} - -// Revoke revokes an access token -func (a SecretsAPI) Revoke(tokenID string) error { - data := struct { - TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` - }{ - tokenID, - } - _, err := a.Client.performQuery(http.MethodPost, "/token/delete", data, nil) - return err -} +package aws + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// TokenAPI exposes the Token API +type TokenAPI struct { + Client DBClient +} + +func (a TokenAPI) init(client DBClient) TokenAPI { + a.Client = client + return a +} + +// TokenCreateResponse is the response from Create +type TokenCreateResponse struct { + TokenValue string `json:"token_value,omitempty" url:"token_value,omitempty"` + TokenInfo models.PublicTokenInfo `json:"token_info,omitempty" url:"token_info,omitempty"` +} + +// Create creates and returns a token +func (a TokenAPI) Create(lifetimeSeconds int64, comment string) (TokenCreateResponse, error) { + var createResponse TokenCreateResponse + + data := struct { + LifetimeSeconds int64 `json:"lifetime_seconds,omitempty" url:"lifetime_seconds,omitempty"` + Comment string `json:"comment,omitempty" url:"comment,omitempty"` + }{ + lifetimeSeconds, + comment, + } + resp, err := a.Client.performQuery(http.MethodPost, "/token/create", data, nil) + if err != nil { + return createResponse, err + } + + err = json.Unmarshal(resp, &createResponse) + return createResponse, err +} + +// List lists all the valid tokens for a user-workspace pair +func (a TokenAPI) List() ([]models.PublicTokenInfo, error) { + var publicTokenInfo struct { + TokenInfos []models.PublicTokenInfo `json:"token_infos,omitempty" url:"token_infos,omitempty"` + } + + resp, err := a.Client.performQuery(http.MethodGet, "/token/list", nil, nil) + if err != nil { + return publicTokenInfo.TokenInfos, err + } + + err = json.Unmarshal(resp, &publicTokenInfo) + return publicTokenInfo.TokenInfos, err +} + +// Revoke revokes an access token +func (a TokenAPI) Revoke(tokenID string) error { + data := struct { + TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` + }{ + tokenID, + } + _, err := a.Client.performQuery(http.MethodPost, "/token/delete", data, nil) + return err +}
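Another aside, not patch content: with the receivers corrected to TokenAPI above, the token endpoints would be exercised roughly as below, continuing the client from the previous sketch. It assumes the AWS client exposes Token() like the Azure client shown later in this diff, and that models.PublicTokenInfo has a TokenID field; both are assumptions.

    // Continuing the sketch above: `client` is an initialised aws.DBClient.
    tokens := client.Token()

    // Mint a token valid for one hour, then revoke it again.
    created, err := tokens.Create(3600, "example token")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("token value:", created.TokenValue)

    if err := tokens.Revoke(created.TokenInfo.TokenID); err != nil { // TokenID assumed
        log.Fatal(err)
    }

diff --git a/aws/workspace.go b/aws/workspace.go index d088793..1071242 100644 --- a/aws/workspace.go +++ b/aws/workspace.go @@ -1,133 +1,133 @@ -package aws - -import ( - "encoding/base64" - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/aws/models" -) - -// WorkspaceAPI exposes the Workspace API -type WorkspaceAPI struct { - Client DBClient -} - -func (a WorkspaceAPI) init(client DBClient) WorkspaceAPI { - a.Client = client - return a -} - -// Delete deletes an object or a directory (and optionally recursively deletes all objects in the directory) -func (a WorkspaceAPI) Delete(path string, recursive bool) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` - }{ - path, - recursive, - } - _, err := a.Client.performQuery(http.MethodPost, "/workspace/delete", data, nil) - return err -} - -// Export exports a notebook or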
contents of an entire directory -func (a WorkspaceAPI) Export(path string, format models.ExportFormat, directDownload bool) ([]byte, error) { - var exportResponse struct { - Content string `json:"content,omitempty" url:"content,omitempty"` - } - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Format models.ExportFormat `json:"format,omitempty" url:"format,omitempty"` - DirectDownload bool `json:"direct_download,omitempty" url:"direct_download,omitempty"` - }{ - path, - format, - directDownload, - } - - resp, err := a.Client.performQuery(http.MethodGet, "/workspace/export", data, nil) - if err != nil { - return []byte{}, err - } - - err = json.Unmarshal(resp, &exportResponse) - if err != nil { - return []byte{}, err - } - - return base64.StdEncoding.DecodeString(exportResponse.Content) -} - -// GetStatus gets the status of an object or a directory -func (a WorkspaceAPI) GetStatus(path string) (models.ObjectInfo, error) { - var objectInfo models.ObjectInfo - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - - resp, err := a.Client.performQuery(http.MethodGet, "/workspace/get-status", data, nil) - if err != nil { - return objectInfo, err - } - - err = json.Unmarshal(resp, &objectInfo) - return objectInfo, err -} - -// Import imports a notebook or the contents of an entire directory -func (a WorkspaceAPI) Import(path string, format models.ExportFormat, - language models.Language, content []byte, overwrite bool) error { - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Format models.ExportFormat `json:"format,omitempty" url:"format,omitempty"` - Language models.Language `json:"language,omitempty" url:"language,omitempty"` - Content string `json:"content,omitempty" url:"content,omitempty"` - Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` - }{ - path, - format, - language, - base64.StdEncoding.EncodeToString(content), - overwrite, - } - _, err := a.Client.performQuery(http.MethodPost, "/workspace/import", data, nil) - return err -} - -// List lists the contents of a directory, or the object if it is not a directory -func (a WorkspaceAPI) List(path string) ([]models.ObjectInfo, error) { - var listResponse struct { - Objects []models.ObjectInfo `json:"objects,omitempty" url:"objects,omitempty"` - } - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - - resp, err := a.Client.performQuery(http.MethodGet, "/workspace/list", data, nil) - if err != nil { - return listResponse.Objects, err - } - - err = json.Unmarshal(resp, &listResponse) - return listResponse.Objects, err -} - -// Mkdirs creates the given directory and necessary parent directories if they do not exists -func (a WorkspaceAPI) Mkdirs(path string) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - _, err := a.Client.performQuery(http.MethodPost, "/workspace/mkdirs", data, nil) - return err -} +package aws + +import ( + "encoding/base64" + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/aws/models" +) + +// WorkspaceAPI exposes the Workspace API +type WorkspaceAPI struct { + Client DBClient +} + +func (a WorkspaceAPI) init(client DBClient) WorkspaceAPI { + a.Client = client + return a +} + +// Delete deletes an object or a directory (and optionally recursively deletes all objects in the directory) +func (a WorkspaceAPI) Delete(path string, recursive bool) error { + data := struct { + 
Path string `json:"path,omitempty" url:"path,omitempty"` + Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` + }{ + path, + recursive, + } + _, err := a.Client.performQuery(http.MethodPost, "/workspace/delete", data, nil) + return err +} + +// Export exports a notebook or the contents of an entire directory +func (a WorkspaceAPI) Export(path string, format models.ExportFormat, directDownload bool) ([]byte, error) { + var exportResponse struct { + Content string `json:"content,omitempty" url:"content,omitempty"` + } + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Format models.ExportFormat `json:"format,omitempty" url:"format,omitempty"` + DirectDownload bool `json:"direct_download,omitempty" url:"direct_download,omitempty"` + }{ + path, + format, + directDownload, + } + + resp, err := a.Client.performQuery(http.MethodGet, "/workspace/export", data, nil) + if err != nil { + return []byte{}, err + } + + err = json.Unmarshal(resp, &exportResponse) + if err != nil { + return []byte{}, err + } + + return base64.StdEncoding.DecodeString(exportResponse.Content) +} + +// GetStatus gets the status of an object or a directory +func (a WorkspaceAPI) GetStatus(path string) (models.ObjectInfo, error) { + var objectInfo models.ObjectInfo + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + + resp, err := a.Client.performQuery(http.MethodGet, "/workspace/get-status", data, nil) + if err != nil { + return objectInfo, err + } + + err = json.Unmarshal(resp, &objectInfo) + return objectInfo, err +} + +// Import imports a notebook or the contents of an entire directory +func (a WorkspaceAPI) Import(path string, format models.ExportFormat, + language models.Language, content []byte, overwrite bool) error { + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Format models.ExportFormat `json:"format,omitempty" url:"format,omitempty"` + Language models.Language `json:"language,omitempty" url:"language,omitempty"` + Content string `json:"content,omitempty" url:"content,omitempty"` + Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` + }{ + path, + format, + language, + base64.StdEncoding.EncodeToString(content), + overwrite, + } + _, err := a.Client.performQuery(http.MethodPost, "/workspace/import", data, nil) + return err +} + +// List lists the contents of a directory, or the object if it is not a directory +func (a WorkspaceAPI) List(path string) ([]models.ObjectInfo, error) { + var listResponse struct { + Objects []models.ObjectInfo `json:"objects,omitempty" url:"objects,omitempty"` + } + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + + resp, err := a.Client.performQuery(http.MethodGet, "/workspace/list", data, nil) + if err != nil { + return listResponse.Objects, err + } + + err = json.Unmarshal(resp, &listResponse) + return listResponse.Objects, err +} + +// Mkdirs creates the given directory and necessary parent directories if they do not exist +func (a WorkspaceAPI) Mkdirs(path string) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + _, err := a.Client.performQuery(http.MethodPost, "/workspace/mkdirs", data, nil) + return err +}
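A final aside before the Azure half of the diff: the Import/Export pair above moves notebook content as base64 on the wire, but the SDK encodes and decodes for you, so callers deal only in raw bytes. The sketch below continues the earlier client and assumes models.ExportFormat and models.Language are string-backed types whose values match the REST API's "SOURCE" and "PYTHON" literals; the diff itself does not show their definitions.

    // Continuing the sketch: `client` is an initialised aws.DBClient.
    ws := client.Workspace()

    // Import a one-line Python notebook, then export it back out.
    src := []byte("print(\"hello\")")
    if err := ws.Import("/Shared/demo", "SOURCE", "PYTHON", src, true); err != nil {
        log.Fatal(err)
    }
    exported, err := ws.Export("/Shared/demo", "SOURCE", false)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(exported)) // already base64-decoded by Export

diff --git a/azure/azure_suite_test.go b/azure/azure_suite_test.go index 54bb2a5..c204140 100644 --- a/azure/azure_suite_test.go +++ b/azure/azure_suite_test.go @@ -1,13 +1,13 @@ -package azure_test - -import ( - "testing" - - .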
"github.com/onsi/ginkgo" - . "github.com/onsi/gomega" -) - -func TestAzure(t *testing.T) { - RegisterFailHandler(Fail) - RunSpecs(t, "Azure Suite") -} +package azure_test + +import ( + "testing" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +func TestAzure(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Azure Suite") +} diff --git a/azure/client.go b/azure/client.go index 621e4b3..873a8e3 100644 --- a/azure/client.go +++ b/azure/client.go @@ -1,79 +1,79 @@ -package azure - -import databricks "github.com/xinsnake/databricks-sdk-golang" - -// DBClient is the client for Azure implements DBClient -type DBClient struct { - Option databricks.DBClientOption -} - -// Init initializes the client -func (c *DBClient) Init(option databricks.DBClientOption) DBClient { - c.Option = option - option.Init() - return *c -} - -// Clusters returns an instance of ClustersAPI -func (c DBClient) Clusters() ClustersAPI { - var clustersAPI ClustersAPI - return clustersAPI.init(c) -} - -// Dbfs returns an instance of DbfsAPI -func (c DBClient) Dbfs() DbfsAPI { - var dbfsAPI DbfsAPI - return dbfsAPI.init(c) -} - -// Groups returns an instance of GroupAPI -func (c DBClient) Groups() GroupsAPI { - var groupsAPI GroupsAPI - return groupsAPI.init(c) -} - -// InstancePools returns an instance of InstancePoolsAPI -func (c DBClient) InstancePools() InstancePoolsAPI { - var instancePoolsAPI InstancePoolsAPI - return instancePoolsAPI.init(c) -} - -// Jobs returns an instance of JobsAPI -func (c DBClient) Jobs() JobsAPI { - var jobsAPI JobsAPI - return jobsAPI.init(c) -} - -// Libraries returns an instance of LibrariesAPI -func (c DBClient) Libraries() LibrariesAPI { - var libraries LibrariesAPI - return libraries.init(c) -} - -// Scim returns an instance of ScimAPI -func (c DBClient) Scim() ScimAPI { - var scimAPI ScimAPI - return scimAPI.init(c) -} - -// Secrets returns an instance of SecretsAPI -func (c DBClient) Secrets() SecretsAPI { - var secretsAPI SecretsAPI - return secretsAPI.init(c) -} - -// Token returns an instance of TokensAPI -func (c DBClient) Token() TokenAPI { - var tokenAPI TokenAPI - return tokenAPI.init(c) -} - -// Workspace returns an instance of WorkspaceAPI -func (c DBClient) Workspace() WorkspaceAPI { - var workspaceAPI WorkspaceAPI - return workspaceAPI.init(c) -} - -func (c *DBClient) performQuery(method, path string, data interface{}, headers map[string]string) ([]byte, error) { - return databricks.PerformQuery(c.Option, method, path, data, headers) -} +package azure + +import databricks "github.com/xinsnake/databricks-sdk-golang" + +// DBClient is the client for Azure implements DBClient +type DBClient struct { + Option databricks.DBClientOption +} + +// Init initializes the client +func (c *DBClient) Init(option databricks.DBClientOption) DBClient { + c.Option = option + option.Init() + return *c +} + +// Clusters returns an instance of ClustersAPI +func (c DBClient) Clusters() ClustersAPI { + var clustersAPI ClustersAPI + return clustersAPI.init(c) +} + +// Dbfs returns an instance of DbfsAPI +func (c DBClient) Dbfs() DbfsAPI { + var dbfsAPI DbfsAPI + return dbfsAPI.init(c) +} + +// Groups returns an instance of GroupAPI +func (c DBClient) Groups() GroupsAPI { + var groupsAPI GroupsAPI + return groupsAPI.init(c) +} + +// InstancePools returns an instance of InstancePoolsAPI +func (c DBClient) InstancePools() InstancePoolsAPI { + var instancePoolsAPI InstancePoolsAPI + return instancePoolsAPI.init(c) +} + +// Jobs returns an instance of JobsAPI +func (c DBClient) 
diff --git a/azure/client_test.go b/azure/client_test.go
index 7aa87b0..5d54b31 100644
--- a/azure/client_test.go
+++ b/azure/client_test.go
@@ -1,63 +1,63 @@
-package azure_test
-
-import (
-	. "github.com/onsi/ginkgo"
-	. "github.com/onsi/gomega"
-
-	databricks "github.com/xinsnake/databricks-sdk-golang"
-	. "github.com/xinsnake/databricks-sdk-golang/azure"
-)
-
-var _ = Describe("Client", func() {
-	testUser := "test-user"
-	option := databricks.DBClientOption{
-		User: testUser,
-	}
-
-	var dbClient DBClient
-	dbClient.Init(option)
-
-	It("Should init with correct option", func() {
-		Expect(dbClient.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised ClustersAPI", func() {
-		Expect(dbClient.Clusters().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised DbfsAPI", func() {
-		Expect(dbClient.Dbfs().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised GroupsAPI", func() {
-		Expect(dbClient.Groups().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised InstancePoolsAPI", func() {
-		Expect(dbClient.InstancePools().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised JobsAPI", func() {
-		Expect(dbClient.Jobs().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised LibrariesAPI", func() {
-		Expect(dbClient.Libraries().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised ScimAPI", func() {
-		Expect(dbClient.Scim().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised SecretsAPI", func() {
-		Expect(dbClient.Secrets().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised TokenAPI", func() {
-		Expect(dbClient.Token().Client.Option.User).To(Equal(testUser))
-	})
-
-	It("Should return initialised WorkspaceAPI", func() {
-		Expect(dbClient.Workspace().Client.Option.User).To(Equal(testUser))
-	})
-})
+package azure_test
+
+import (
+	. "github.com/onsi/ginkgo"
+	. "github.com/onsi/gomega"
+
+	databricks "github.com/xinsnake/databricks-sdk-golang"
+	. "github.com/xinsnake/databricks-sdk-golang/azure"
+)
+
+var _ = Describe("Client", func() {
+	testUser := "test-user"
+	option := databricks.DBClientOption{
+		User: testUser,
+	}
+
+	var dbClient DBClient
+	dbClient.Init(option)
+
+	It("Should init with correct option", func() {
+		Expect(dbClient.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised ClustersAPI", func() {
+		Expect(dbClient.Clusters().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised DbfsAPI", func() {
+		Expect(dbClient.Dbfs().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised GroupsAPI", func() {
+		Expect(dbClient.Groups().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised InstancePoolsAPI", func() {
+		Expect(dbClient.InstancePools().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised JobsAPI", func() {
+		Expect(dbClient.Jobs().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised LibrariesAPI", func() {
+		Expect(dbClient.Libraries().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised ScimAPI", func() {
+		Expect(dbClient.Scim().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised SecretsAPI", func() {
+		Expect(dbClient.Secrets().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised TokenAPI", func() {
+		Expect(dbClient.Token().Client.Option.User).To(Equal(testUser))
+	})
+
+	It("Should return initialised WorkspaceAPI", func() {
+		Expect(dbClient.Workspace().Client.Option.User).To(Equal(testUser))
+	})
+})
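Note: RunSpecs in azure_suite_test.go picks up every spec registered in package azure_test, so new API tests only need their own Describe block. A sketch mirroring the pattern above (the file name workspace_test.go is hypothetical):

package azure_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	databricks "github.com/xinsnake/databricks-sdk-golang"
	. "github.com/xinsnake/databricks-sdk-golang/azure"
)

// Hypothetical spec file; the suite's RunSpecs call discovers it automatically.
var _ = Describe("Workspace", func() {
	var dbClient DBClient
	dbClient.Init(databricks.DBClientOption{User: "test-user"})

	It("Should share the client option", func() {
		Expect(dbClient.Workspace().Client.Option.User).To(Equal("test-user"))
	})
})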
"github.com/xinsnake/databricks-sdk-golang/azure" +) + +var _ = Describe("Client", func() { + testUser := "test-user" + option := databricks.DBClientOption{ + User: testUser, + } + + var dbClient DBClient + dbClient.Init(option) + + It("Should init with correct option", func() { + Expect(dbClient.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised ClustersAPI", func() { + Expect(dbClient.Clusters().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised DbfsAPI", func() { + Expect(dbClient.Dbfs().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised GroupsAPI", func() { + Expect(dbClient.Groups().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised InstancePoolsAPI", func() { + Expect(dbClient.InstancePools().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised JobsAPI", func() { + Expect(dbClient.Jobs().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised LibrariesAPI", func() { + Expect(dbClient.Libraries().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised ScimAPI", func() { + Expect(dbClient.Scim().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised SecretsAPI", func() { + Expect(dbClient.Secrets().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised TokenAPI", func() { + Expect(dbClient.Token().Client.Option.User).To(Equal(testUser)) + }) + + It("Should return initialised WorkspaceAPI", func() { + Expect(dbClient.Workspace().Client.Option.User).To(Equal(testUser)) + }) +}) diff --git a/azure/clusters.go b/azure/clusters.go index 436d553..ef64395 100644 --- a/azure/clusters.go +++ b/azure/clusters.go @@ -1,221 +1,222 @@ -package azure - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" - "github.com/xinsnake/databricks-sdk-golang/azure/clusters" -) - -// ClustersAPI exposes the Clusters API -type ClustersAPI struct { - Client DBClient -} - -func (a ClustersAPI) init(client DBClient) ClustersAPI { - a.Client = client - return a -} - -// Create creates a new Spark cluster -func (a ClustersAPI) Create(cluster clusters.CreateReq) (clusters.CreateResp, error) { - var createResp clusters.CreateResp - - resp, err := a.Client.performQuery(http.MethodPost, "/clusters/create", cluster, nil) - if err != nil { - return createResp, err - } - - err = json.Unmarshal(resp, &createResp) - return createResp, err -} - -// Edit edits the configuration of a cluster to match the provided attributes and size -func (a ClustersAPI) Edit(editReq clusters.EditReq) error { - _, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", editReq, nil) - return err -} - -// Start starts a terminated Spark cluster given its ID -func (a ClustersAPI) Start(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/start", data, nil) - return err -} - -// Restart restart a Spark cluster given its ID. If the cluster is not in a RUNNING state, nothing will happen. -func (a ClustersAPI) Restart(clusterID string) error { - data := struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - }{ - clusterID, - } - _, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", data, nil) - return err -} - -// Resize resizes a cluster to have a desired number of workers. 
+package azure
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+	clusterModels "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models"
+	clusterHttpModels "github.com/xinsnake/databricks-sdk-golang/azure/clusters/httpmodels"
+)
+
+// ClustersAPI exposes the Clusters API
+type ClustersAPI struct {
+	Client DBClient
+}
+
+func (a ClustersAPI) init(client DBClient) ClustersAPI {
+	a.Client = client
+	return a
+}
+
+// Create creates a new Spark cluster
+func (a ClustersAPI) Create(cluster clusterHttpModels.CreateReq) (clusterHttpModels.CreateResp, error) {
+	var createResp clusterHttpModels.CreateResp
+
+	resp, err := a.Client.performQuery(http.MethodPost, "/clusters/create", cluster, nil)
+	if err != nil {
+		return createResp, err
+	}
+
+	err = json.Unmarshal(resp, &createResp)
+	return createResp, err
+}
+
+// Edit edits the configuration of a cluster to match the provided attributes and size
+func (a ClustersAPI) Edit(editReq clusterHttpModels.EditReq) error {
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", editReq, nil)
+	return err
+}
+
+// Start starts a terminated Spark cluster given its ID
+func (a ClustersAPI) Start(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/start", data, nil)
+	return err
+}
+
+// Restart restarts a Spark cluster given its ID. If the cluster is not in a RUNNING state, nothing will happen.
+func (a ClustersAPI) Restart(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", data, nil)
+	return err
+}
+
+// Resize resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a RUNNING state.
+func (a ClustersAPI) Resize(clusterID string, clusterSize clusterModels.ClusterSize) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+		clusterModels.ClusterSize
+	}{
+		clusterID,
+		clusterSize,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/resize", data, nil)
+	return err
+}
+
+// Terminate terminates a Spark cluster given its ID
+func (a ClustersAPI) Terminate(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/delete", data, nil)
+	return err
+}
+
+// Delete is an alias of Terminate
+func (a ClustersAPI) Delete(clusterID string) error {
+	return a.Terminate(clusterID)
+}
+
+// PermanentDelete permanently deletes a cluster
+func (a ClustersAPI) PermanentDelete(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/permanent-delete", data, nil)
+	return err
+}
+
+// Get retrieves the information for a cluster given its identifier
+func (a ClustersAPI) Get(clusterID string) (clusterHttpModels.GetResp, error) {
+	var clusterInfo clusterHttpModels.GetResp
+
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/clusters/get", data, nil)
+	if err != nil {
+		return clusterInfo, err
+	}
+
+	err = json.Unmarshal(resp, &clusterInfo)
+	return clusterInfo, err
+}
+
+// Pin ensures that an interactive cluster configuration is retained even after a cluster has been terminated for more than 30 days
+func (a ClustersAPI) Pin(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/pin", data, nil)
+	return err
+}
+
+// Unpin allows the cluster to eventually be removed from the list returned by the List API
+func (a ClustersAPI) Unpin(clusterID string) error {
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/clusters/unpin", data, nil)
+	return err
+}
+
+// List returns information about all pinned clusters, currently active clusters,
+// up to 70 of the most recently terminated interactive clusters in the past 30 days,
+// and up to 30 of the most recently terminated job clusters in the past 30 days
+func (a ClustersAPI) List() ([]clusterHttpModels.GetResp, error) {
+	var clusterList = struct {
+		Clusters []clusterHttpModels.GetResp `json:"clusters,omitempty" url:"clusters,omitempty"`
+	}{}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list", nil, nil)
+	if err != nil {
+		return clusterList.Clusters, err
+	}
+
+	err = json.Unmarshal(resp, &clusterList)
+	return clusterList.Clusters, err
+}
+
+// ListNodeTypes returns a list of supported Spark node types
+func (a ClustersAPI) ListNodeTypes() ([]clusterHttpModels.ListNodeTypesRespItem, error) {
+	var nodeTypeList = struct {
+		NodeTypes []clusterHttpModels.ListNodeTypesRespItem `json:"node_types,omitempty" url:"node_types,omitempty"`
+	}{}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-node-types", nil, nil)
+	if err != nil {
+		return nodeTypeList.NodeTypes, err
+	}
+
+	err = json.Unmarshal(resp, &nodeTypeList)
+	return nodeTypeList.NodeTypes, err
+}
+
+// SparkVersions returns the list of available Spark versions
+func (a ClustersAPI) SparkVersions() ([]clusterHttpModels.SparkVersionsRespItem, error) {
+	var versionsList = struct {
+		Versions []clusterHttpModels.SparkVersionsRespItem `json:"versions,omitempty" url:"versions,omitempty"`
+	}{}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/clusters/spark-versions", nil, nil)
+	if err != nil {
+		return versionsList.Versions, err
+	}
+
+	err = json.Unmarshal(resp, &versionsList)
+	return versionsList.Versions, err
+}
+
+
+// Events retrieves a list of events about the activity of a cluster
+func (a ClustersAPI) Events(
+	clusterID string, startTime, endTime int64, order models.ListOrder,
+	eventTypes []clusterModels.ClusterEventType, offset, limit int64) (clusterHttpModels.EventsResp, error) {
+
+	var eventsResponse clusterHttpModels.EventsResp
+
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+		StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
+		EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"`
+		Order models.ListOrder `json:"order,omitempty" url:"order,omitempty"`
+		EventTypes []clusterModels.ClusterEventType `json:"event_types,omitempty" url:"event_types,omitempty"`
+		Offset int64 `json:"offset,omitempty" url:"offset,omitempty"`
+		Limit int64 `json:"limit,omitempty" url:"limit,omitempty"`
+	}{
+		clusterID,
+		startTime,
+		endTime,
+		order,
+		eventTypes,
+		offset,
+		limit,
+	}
+	resp, err := a.Client.performQuery(http.MethodPost, "/clusters/events", data, nil)
+	if err != nil {
+		return eventsResponse, err
+	}
+
+	err = json.Unmarshal(resp, &eventsResponse)
+	return eventsResponse, err
+}
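Note: with the models split, request/response types now come from azure/clusters/httpmodels and enums such as ClusterState from azure/clusters/models, so callers import both. A create-and-wait sketch under those imports (the Spark version and node type strings are placeholders, and the auth fields of DBClientOption are not shown in this diff):

package main

import (
	"fmt"
	"log"
	"time"

	databricks "github.com/xinsnake/databricks-sdk-golang"
	"github.com/xinsnake/databricks-sdk-golang/azure"
	"github.com/xinsnake/databricks-sdk-golang/azure/clusters/httpmodels"
	clusterModels "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models"
)

func main() {
	var c azure.DBClient
	c.Init(databricks.DBClientOption{ /* auth fields omitted; not shown in this diff */ })

	created, err := c.Clusters().Create(httpmodels.CreateReq{
		ClusterName:            "sdk-example",
		SparkVersion:           "5.3.x-scala2.11", // placeholder
		NodeTypeID:             "Standard_DS3_v2", // placeholder
		NumWorkers:             2,
		AutoterminationMinutes: 60,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Get reports the lifecycle state; poll until the cluster is RUNNING.
	for {
		info, err := c.Clusters().Get(created.ClusterID)
		if err != nil {
			log.Fatal(err)
		}
		if info.State != nil && *info.State == clusterModels.ClusterStateRunning {
			break
		}
		time.Sleep(30 * time.Second)
	}
	fmt.Println("cluster ready:", created.ClusterID)
}

Events pages in a similar way: offset and limit select a window, and EventsResp.NextPage carries the cursor values for a follow-up call.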
diff --git a/azure/clusters/Create.go b/azure/clusters/httpmodels/create.go
similarity index 94%
rename from azure/clusters/Create.go
rename to azure/clusters/httpmodels/create.go
index 5146e59..86201b0 100644
--- a/azure/clusters/Create.go
+++ b/azure/clusters/httpmodels/create.go
@@ -1,28 +1,28 @@
-package clusters
-
-import (
-	models "github.com/xinsnake/databricks-sdk-golang/azure/models"
-)
-
-type CreateReq struct {
-	NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
-	Autoscale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
-	ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
-	SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
-	SparkConf map[string]string `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
-	NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
-	DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
-	CustomTags []models.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
-	ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
-	InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
-	DockerImage
models.DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` - IdempotencyToken string `json:"idempotency_token,omitempty" url:"idempotency_token,omitempty"` -} - -type CreateResp struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` -} +package httpmodels + +import ( + "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models" +) + +type CreateReq struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + Autoscale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf map[string]string `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + CustomTags []models.ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + DockerImage models.DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` + IdempotencyToken string `json:"idempotency_token,omitempty" url:"idempotency_token,omitempty"` +} + +type CreateResp struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` +} diff --git a/azure/clusters/Edit.go b/azure/clusters/httpmodels/edit.go similarity index 96% rename from azure/clusters/Edit.go rename to azure/clusters/httpmodels/edit.go index a15006b..198239a 100644 --- a/azure/clusters/Edit.go +++ b/azure/clusters/httpmodels/edit.go @@ -1,36 +1,36 @@ -package clusters - -import ( - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -type EditReq struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - AutoScale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - Driver *models.SparkNode `json:"driver,omitempty" url:"driver,omitempty"` - Executors []models.SparkNode `json:"executors,omitempty" url:"executors,omitempty"` - SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` - JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` - ClusterName string `json:"cluster_name,omitempty" 
url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - State *models.ClusterState `json:"state,omitempty" url:"state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` - LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` - LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` - ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` - ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` - DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` - ClusterLogStatus *models.LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` - TerminationReason *models.TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` -} +package httpmodels + +import ( + "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models" +) + +type EditReq struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + AutoScale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + Driver *models.SparkNode `json:"driver,omitempty" url:"driver,omitempty"` + Executors []models.SparkNode `json:"executors,omitempty" url:"executors,omitempty"` + SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` + JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + State *models.ClusterState `json:"state,omitempty" 
url:"state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` + LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` + LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` + ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` + ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` + DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` + ClusterLogStatus *models.LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` + TerminationReason *models.TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` +} diff --git a/azure/clusters/events.go b/azure/clusters/httpmodels/events.go similarity index 82% rename from azure/clusters/events.go rename to azure/clusters/httpmodels/events.go index 0a73760..1a5ea53 100644 --- a/azure/clusters/events.go +++ b/azure/clusters/httpmodels/events.go @@ -1,15 +1,15 @@ -package clusters - -import ( - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -type EventsResp struct { - Events []models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"` - NextPage struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` - Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` - } `json:"next_page,omitempty" url:"next_page,omitempty"` - TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"` +package httpmodels + +import ( + "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models" +) + +type EventsResp struct { + Events []models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"` + NextPage struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"` + Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` + } `json:"next_page,omitempty" url:"next_page,omitempty"` + TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"` } \ No newline at end of file diff --git a/azure/clusters/get.go b/azure/clusters/httpmodels/get.go similarity index 96% rename from azure/clusters/get.go rename to azure/clusters/httpmodels/get.go index 75255db..c0808d3 100644 --- a/azure/clusters/get.go +++ b/azure/clusters/httpmodels/get.go @@ -1,36 +1,36 @@ -package clusters - -import ( - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -type GetResp struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - AutoScale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - Driver *models.SparkNode `json:"driver,omitempty" url:"driver,omitempty"` - Executors []models.SparkNode `json:"executors,omitempty" url:"executors,omitempty"` - SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` - JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` - ClusterName string 
`json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - State *models.ClusterState `json:"state,omitempty" url:"state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` - LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` - LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` - ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` - ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` - DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` - ClusterLogStatus *models.LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` - TerminationReason *models.TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` -} +package httpmodels + +import ( + "github.com/xinsnake/databricks-sdk-golang/azure/clusters/models" +) + +type GetResp struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + AutoScale *models.AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + Driver *models.SparkNode `json:"driver,omitempty" url:"driver,omitempty"` + Executors []models.SparkNode `json:"executors,omitempty" url:"executors,omitempty"` + SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` + JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *models.SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + ClusterLogConf *models.ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []models.InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + State *models.ClusterState 
`json:"state,omitempty" url:"state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` + LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` + LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` + ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` + ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` + DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` + ClusterLogStatus *models.LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` + TerminationReason *models.TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` +} diff --git a/azure/clusters/list_node_types.go b/azure/clusters/httpmodels/list_node_types.go similarity index 96% rename from azure/clusters/list_node_types.go rename to azure/clusters/httpmodels/list_node_types.go index 4643339..8d5070e 100644 --- a/azure/clusters/list_node_types.go +++ b/azure/clusters/httpmodels/list_node_types.go @@ -1,15 +1,15 @@ -package clusters - -import ( - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -type ListNodeTypesRespItem struct { - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"` - NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"` - Description string `json:"description,omitempty" url:"description,omitempty"` - InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"` - IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"` - NodeInfo *models.ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"` -} +package httpmodels + +import ( + "github.com/xinsnake/databricks-sdk-golang/azure/models" +) + +type ListNodeTypesRespItem struct { + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"` + NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"` + Description string `json:"description,omitempty" url:"description,omitempty"` + InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"` + IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"` + NodeInfo *models.ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"` +} diff --git a/azure/clusters/spark_versions.go b/azure/clusters/httpmodels/spark_versions.go similarity index 85% rename from azure/clusters/spark_versions.go rename to azure/clusters/httpmodels/spark_versions.go index deb2e7f..cc93410 100644 --- a/azure/clusters/spark_versions.go +++ b/azure/clusters/httpmodels/spark_versions.go @@ -1,6 +1,6 @@ -package clusters - -type SparkVersionsRespItem struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Name string `json:"name,omitempty" url:"name,omitempty"` -} +package httpmodels + +type SparkVersionsRespItem struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` +} diff --git a/azure/models/AutoScale.go 
b/azure/clusters/models/AutoScale.go similarity index 97% rename from azure/models/AutoScale.go rename to azure/clusters/models/AutoScale.go index b5f2648..bbf1f69 100644 --- a/azure/models/AutoScale.go +++ b/azure/clusters/models/AutoScale.go @@ -1,6 +1,6 @@ -package models - -type AutoScale struct { - MinWorkers int32 `json:"min_workers,omitempty" url:"min_workers,omitempty"` - MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"` -} +package models + +type AutoScale struct { + MinWorkers int32 `json:"min_workers,omitempty" url:"min_workers,omitempty"` + MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"` +} diff --git a/azure/models/AzureDiskVolumeType.go b/azure/clusters/models/AzureDiskVolumeType.go similarity index 95% rename from azure/models/AzureDiskVolumeType.go rename to azure/clusters/models/AzureDiskVolumeType.go index de10f19..e905bf1 100644 --- a/azure/models/AzureDiskVolumeType.go +++ b/azure/clusters/models/AzureDiskVolumeType.go @@ -1,8 +1,8 @@ -package models - -type AzureDiskVolumeType string - -const ( - AzureDiskVolumeTypePremiumLRS = "PREMIUM_LRS" - AzureDiskVolumeTypeStandardLRS = "STANDARD_LRS" -) +package models + +type AzureDiskVolumeType string + +const ( + AzureDiskVolumeTypePremiumLRS = "PREMIUM_LRS" + AzureDiskVolumeTypeStandardLRS = "STANDARD_LRS" +) diff --git a/azure/models/ClusterAttributes.go b/azure/clusters/models/ClusterAttributes.go similarity index 98% rename from azure/models/ClusterAttributes.go rename to azure/clusters/models/ClusterAttributes.go index bdade33..e272a60 100644 --- a/azure/models/ClusterAttributes.go +++ b/azure/clusters/models/ClusterAttributes.go @@ -1,17 +1,17 @@ -package models - -type ClusterAttributes struct { - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - ClusterSource *ClusterSource `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` -} +package models + +type ClusterAttributes struct { + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"` + CustomTags 
[]ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` + ClusterSource *ClusterSource `json:"cluster_source,omitempty" url:"cluster_source,omitempty"` +} diff --git a/azure/models/ClusterEvent.go b/azure/clusters/models/ClusterEvent.go similarity index 97% rename from azure/models/ClusterEvent.go rename to azure/clusters/models/ClusterEvent.go index 3c4bf35..743fcb6 100644 --- a/azure/models/ClusterEvent.go +++ b/azure/clusters/models/ClusterEvent.go @@ -1,8 +1,8 @@ -package models - -type ClusterEvent struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"` - Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"` - Details *EventDetails `json:"details,omitempty" url:"details,omitempty"` -} +package models + +type ClusterEvent struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"` + Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"` + Details *EventDetails `json:"details,omitempty" url:"details,omitempty"` +} diff --git a/azure/models/ClusterEventType.go b/azure/clusters/models/ClusterEventType.go similarity index 97% rename from azure/models/ClusterEventType.go rename to azure/clusters/models/ClusterEventType.go index d9d550a..bed64b4 100644 --- a/azure/models/ClusterEventType.go +++ b/azure/clusters/models/ClusterEventType.go @@ -1,27 +1,27 @@ -package models - -type ClusterEventType string - -const ( - ClusterEventTypeCreating = "CREATING" - ClusterEventTypeDidNotExpandDisk = "DID_NOT_EXPAND_DISK" - ClusterEventTypeExpandedDisk = "EXPANDED_DISK" - ClusterEventTypeFailedToExpandDisk = "FAILED_TO_EXPAND_DISK" - ClusterEventTypeInitScriptStarting = "INIT_SCRIPTS_STARTING" - ClusterEventTypeInitScriptFinished = "INIT_SCRIPTS_FINISHED" - ClusterEventTypeStarting = "STARTING" - ClusterEventTypeRestarting = "RESTARTING" - ClusterEventTypeTerminating = "TERMINATING" - ClusterEventTypeEdited = "EDITED" - ClusterEventTypeRunning = "RUNNING" - ClusterEventTypeResizing = "RESIZING" - ClusterEventTypeUpsizeCompleted = "UPSIZE_COMPLETED" - ClusterEventTypeNodesLost = "NODES_LOST" - ClusterEventTypeDriverHealthy = "DRIVER_HEALTHY" - ClusterEventTypeDriverUnavailable = "DRIVER_UNAVAILABLE" - ClusterEventTypeSparkException = "SPARK_EXCEPTION" - ClusterEventTypeDriverNotResponding = "DRIVER_NOT_RESPONDING" - ClusterEventTypeDbfsDown = "DBFS_DOWN" - ClusterEventTypeMetastoreDown = "METASTORE_DOWN" - ClusterEventTypeAutoscalingStatsReport = "AUTOSCALING_STATS_REPORT" -) +package models + +type ClusterEventType string + +const ( + ClusterEventTypeCreating = "CREATING" + ClusterEventTypeDidNotExpandDisk = "DID_NOT_EXPAND_DISK" + ClusterEventTypeExpandedDisk = "EXPANDED_DISK" + ClusterEventTypeFailedToExpandDisk = "FAILED_TO_EXPAND_DISK" + ClusterEventTypeInitScriptStarting = "INIT_SCRIPTS_STARTING" + ClusterEventTypeInitScriptFinished = "INIT_SCRIPTS_FINISHED" + 
ClusterEventTypeStarting = "STARTING" + ClusterEventTypeRestarting = "RESTARTING" + ClusterEventTypeTerminating = "TERMINATING" + ClusterEventTypeEdited = "EDITED" + ClusterEventTypeRunning = "RUNNING" + ClusterEventTypeResizing = "RESIZING" + ClusterEventTypeUpsizeCompleted = "UPSIZE_COMPLETED" + ClusterEventTypeNodesLost = "NODES_LOST" + ClusterEventTypeDriverHealthy = "DRIVER_HEALTHY" + ClusterEventTypeDriverUnavailable = "DRIVER_UNAVAILABLE" + ClusterEventTypeSparkException = "SPARK_EXCEPTION" + ClusterEventTypeDriverNotResponding = "DRIVER_NOT_RESPONDING" + ClusterEventTypeDbfsDown = "DBFS_DOWN" + ClusterEventTypeMetastoreDown = "METASTORE_DOWN" + ClusterEventTypeAutoscalingStatsReport = "AUTOSCALING_STATS_REPORT" +) diff --git a/azure/models/ClusterInfo.go b/azure/clusters/models/ClusterInfo.go similarity index 98% rename from azure/models/ClusterInfo.go rename to azure/clusters/models/ClusterInfo.go index 96c0130..a595231 100644 --- a/azure/models/ClusterInfo.go +++ b/azure/clusters/models/ClusterInfo.go @@ -1,32 +1,32 @@ -package models - -type ClusterInfo struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - Driver *SparkNode `json:"driver,omitempty" url:"driver,omitempty"` - Executors []SparkNode `json:"executors,omitempty" url:"executors,omitempty"` - SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` - JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - State *ClusterState `json:"state,omitempty" url:"state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` - LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` - LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` - ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` - ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` - DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` - ClusterLogStatus *LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` - TerminationReason *TerminationReason `json:"termination_reason,omitempty" 
url:"termination_reason,omitempty"` -} +package models + +type ClusterInfo struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + Driver *SparkNode `json:"driver,omitempty" url:"driver,omitempty"` + Executors []SparkNode `json:"executors,omitempty" url:"executors,omitempty"` + SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"` + JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"` + ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` + SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` + SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` + NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` + ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` + InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` + SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` + AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` + State *ClusterState `json:"state,omitempty" url:"state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"` + LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"` + LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"` + ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"` + ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"` + DefaultTags map[string]string `json:"default_tags,omitempty" url:"default_tags,omitempty"` + ClusterLogStatus *LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"` + TerminationReason *TerminationReason `json:"termination_reason,omitempty" url:"termination_reason,omitempty"` +} diff --git a/azure/models/ClusterLogConf.go b/azure/clusters/models/ClusterLogConf.go similarity index 95% rename from azure/models/ClusterLogConf.go rename to azure/clusters/models/ClusterLogConf.go index 7c73b52..3733887 100644 --- a/azure/models/ClusterLogConf.go +++ b/azure/clusters/models/ClusterLogConf.go @@ -1,5 +1,5 @@ -package models - -type ClusterLogConf struct { - Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` -} +package models + +type ClusterLogConf struct { + Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` +} diff --git a/azure/models/ClusterSize.go b/azure/clusters/models/ClusterSize.go similarity index 97% rename from azure/models/ClusterSize.go rename to azure/clusters/models/ClusterSize.go index 74ac9bb..502cf5d 100644 --- a/azure/models/ClusterSize.go +++ b/azure/clusters/models/ClusterSize.go @@ -1,6 +1,6 @@ -package models - -type ClusterSize struct { - NumWorkers int32 `json:"num_workers,omitempty" 
url:"num_workers,omitempty"` - Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` -} +package models + +type ClusterSize struct { + NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` + Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` +} diff --git a/azure/models/ClusterSource.go b/azure/clusters/models/ClusterSource.go similarity index 93% rename from azure/models/ClusterSource.go rename to azure/clusters/models/ClusterSource.go index 71d1680..7131a35 100644 --- a/azure/models/ClusterSource.go +++ b/azure/clusters/models/ClusterSource.go @@ -1,9 +1,9 @@ -package models - -type ClusterSource string - -const ( - ClusterSourceUI = "UI" - ClusterSourceJob = "JOB" - ClusterSourceAPI = "API" -) +package models + +type ClusterSource string + +const ( + ClusterSourceUI = "UI" + ClusterSourceJob = "JOB" + ClusterSourceAPI = "API" +) diff --git a/azure/models/ClusterState.go b/azure/clusters/models/ClusterState.go similarity index 96% rename from azure/models/ClusterState.go rename to azure/clusters/models/ClusterState.go index b3de5c8..93cf42a 100644 --- a/azure/models/ClusterState.go +++ b/azure/clusters/models/ClusterState.go @@ -1,14 +1,14 @@ -package models - -type ClusterState string - -const ( - ClusterStatePending = "PENDING" - ClusterStateRunning = "RUNNING" - ClusterStateRestarting = "RESTARTING" - ClusterStateResizing = "RESIZING" - ClusterStateTerminating = "TERMINATING" - ClusterStateError = "ERROR" - ClusterStateUnknown = "UNKNOWN" - ClusterStateTERMINATED = "TERMINATED" -) +package models + +type ClusterState string + +const ( + ClusterStatePending = "PENDING" + ClusterStateRunning = "RUNNING" + ClusterStateRestarting = "RESTARTING" + ClusterStateResizing = "RESIZING" + ClusterStateTerminating = "TERMINATING" + ClusterStateError = "ERROR" + ClusterStateUnknown = "UNKNOWN" + ClusterStateTERMINATED = "TERMINATED" +) diff --git a/azure/models/ClusterTag.go b/azure/clusters/models/ClusterTag.go similarity index 96% rename from azure/models/ClusterTag.go rename to azure/clusters/models/ClusterTag.go index c37c004..2a2d227 100644 --- a/azure/models/ClusterTag.go +++ b/azure/clusters/models/ClusterTag.go @@ -1,6 +1,6 @@ -package models - -type ClusterTag struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ClusterTag struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/azure/models/DbfsStorageInfo.go b/azure/clusters/models/DbfsStorageInfo.go similarity index 96% rename from azure/models/DbfsStorageInfo.go rename to azure/clusters/models/DbfsStorageInfo.go index 8d8525f..7875924 100644 --- a/azure/models/DbfsStorageInfo.go +++ b/azure/clusters/models/DbfsStorageInfo.go @@ -1,5 +1,5 @@ -package models - -type DbfsStorageInfo struct { - Destination string `json:"destination,omitempty" url:"destination,omitempty"` -} +package models + +type DbfsStorageInfo struct { + Destination string `json:"destination,omitempty" url:"destination,omitempty"` +} diff --git a/azure/models/DiskSpec.go b/azure/clusters/models/DiskSpec.go similarity index 97% rename from azure/models/DiskSpec.go rename to azure/clusters/models/DiskSpec.go index aba9fb2..51a9752 100644 --- a/azure/models/DiskSpec.go +++ b/azure/clusters/models/DiskSpec.go @@ -1,7 +1,7 @@ -package models - -type DiskSpec struct { - DiskType *DiskType `json:"disk_type,omitempty" 
url:"disk_type,omitempty"` - DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"` - DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"` -} +package models + +type DiskSpec struct { + DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"` + DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"` + DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"` +} diff --git a/azure/models/DiskType.go b/azure/clusters/models/DiskType.go similarity index 97% rename from azure/models/DiskType.go rename to azure/clusters/models/DiskType.go index 18b022f..21eb146 100644 --- a/azure/models/DiskType.go +++ b/azure/clusters/models/DiskType.go @@ -1,5 +1,5 @@ -package models - -type DiskType struct { - AzureDiskVolumeType AzureDiskVolumeType `json:"azure_disk_volume_type,omitempty" url:"azure_disk_volume_type,omitempty"` -} +package models + +type DiskType struct { + AzureDiskVolumeType AzureDiskVolumeType `json:"azure_disk_volume_type,omitempty" url:"azure_disk_volume_type,omitempty"` +} diff --git a/azure/models/DockerBasicAuth.go b/azure/clusters/models/DockerBasicAuth.go similarity index 97% rename from azure/models/DockerBasicAuth.go rename to azure/clusters/models/DockerBasicAuth.go index 585c243..2377708 100644 --- a/azure/models/DockerBasicAuth.go +++ b/azure/clusters/models/DockerBasicAuth.go @@ -1,6 +1,6 @@ -package models - -type DockerBasicAuth struct { - Username string `json:"username,omitempty" url:"username,omitempty"` - Password string `json:"password,omitempty" url:"password,omitempty"` +package models + +type DockerBasicAuth struct { + Username string `json:"username,omitempty" url:"username,omitempty"` + Password string `json:"password,omitempty" url:"password,omitempty"` } \ No newline at end of file diff --git a/azure/models/DockerImage.go b/azure/clusters/models/DockerImage.go similarity index 97% rename from azure/models/DockerImage.go rename to azure/clusters/models/DockerImage.go index b564219..62a1384 100644 --- a/azure/models/DockerImage.go +++ b/azure/clusters/models/DockerImage.go @@ -1,6 +1,6 @@ -package models - -type DockerImage struct { - Url string `json:"url,omitempty" url:"url,omitempty"` - BasicAuth DockerBasicAuth `json:"basic_auth,omitempty" url:"basic_auth,omitempty"` +package models + +type DockerImage struct { + Url string `json:"url,omitempty" url:"url,omitempty"` + BasicAuth DockerBasicAuth `json:"basic_auth,omitempty" url:"basic_auth,omitempty"` } \ No newline at end of file diff --git a/azure/models/EventDetails.go b/azure/clusters/models/EventDetails.go similarity index 98% rename from azure/models/EventDetails.go rename to azure/clusters/models/EventDetails.go index 84596a5..91886bc 100644 --- a/azure/models/EventDetails.go +++ b/azure/clusters/models/EventDetails.go @@ -1,10 +1,10 @@ -package models - -type EventDetails struct { - CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"` - TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"` - PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"` - Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"` - PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"` - ClusterSize *ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"` -} +package models + +type EventDetails struct { + 
CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"` + TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"` + PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"` + Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"` + PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"` + ClusterSize *ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"` +} diff --git a/azure/models/InitScriptInfo.go b/azure/clusters/models/InitScriptInfo.go similarity index 95% rename from azure/models/InitScriptInfo.go rename to azure/clusters/models/InitScriptInfo.go index 2db389d..cd32284 100644 --- a/azure/models/InitScriptInfo.go +++ b/azure/clusters/models/InitScriptInfo.go @@ -1,5 +1,5 @@ -package models - -type InitScriptInfo struct { - Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` -} +package models + +type InitScriptInfo struct { + Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"` +} diff --git a/azure/models/InstancePoolAndStats.go b/azure/clusters/models/InstancePoolAndStats.go similarity index 98% rename from azure/models/InstancePoolAndStats.go rename to azure/clusters/models/InstancePoolAndStats.go index 51c72f9..c92cee3 100644 --- a/azure/models/InstancePoolAndStats.go +++ b/azure/clusters/models/InstancePoolAndStats.go @@ -1,17 +1,17 @@ -package models - -type InstancePoolAndStats struct { - InstancePoolName string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"` - MinIdleInstances int32 `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"` - MaxCapacity int32 `json:"max_capacity,omitempty" url:"max_capacity,omitempty"` - NodetypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - IdleInstanceAutoterminationMinutes int32 `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - DiskSpec DiskSpec `json:"disk_spec,omitempty" url:"disk_spec,omitempty"` - PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"` - InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` - DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` - State InstancePoolState `json:"state,omitempty" url:"state,omitempty"` - Stats InstancePoolStats `json:"stats,omitempty" url:"stats,omitempty"` -} +package models + +type InstancePoolAndStats struct { + InstancePoolName string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"` + MinIdleInstances int32 `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"` + MaxCapacity int32 `json:"max_capacity,omitempty" url:"max_capacity,omitempty"` + NodetypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` + CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` + IdleInstanceAutoterminationMinutes int32 `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"` + EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" 
url:"enable_elastic_disk,omitempty"` + DiskSpec DiskSpec `json:"disk_spec,omitempty" url:"disk_spec,omitempty"` + PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"` + InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` + DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"` + State InstancePoolState `json:"state,omitempty" url:"state,omitempty"` + Stats InstancePoolStats `json:"stats,omitempty" url:"stats,omitempty"` +} diff --git a/azure/models/InstancePoolState.go b/azure/clusters/models/InstancePoolState.go similarity index 94% rename from azure/models/InstancePoolState.go rename to azure/clusters/models/InstancePoolState.go index 0c8be17..f00e7cd 100644 --- a/azure/models/InstancePoolState.go +++ b/azure/clusters/models/InstancePoolState.go @@ -1,8 +1,8 @@ -package models - -type InstancePoolState string - -const ( - InstancePoolStateActive = "ACTIVE" - InstancePoolStateDeleted = "DELETED" -) +package models + +type InstancePoolState string + +const ( + InstancePoolStateActive = "ACTIVE" + InstancePoolStateDeleted = "DELETED" +) diff --git a/azure/models/InstancePoolStats.go b/azure/clusters/models/InstancePoolStats.go similarity index 98% rename from azure/models/InstancePoolStats.go rename to azure/clusters/models/InstancePoolStats.go index 3baf900..a394f24 100644 --- a/azure/models/InstancePoolStats.go +++ b/azure/clusters/models/InstancePoolStats.go @@ -1,8 +1,8 @@ -package models - -type InstancePoolStats struct { - UsedCount int32 `json:"used_count,omitempty" url:"used_count,omitempty"` - IdleCount int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"` - PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"` - PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"` -} +package models + +type InstancePoolStats struct { + UsedCount int32 `json:"used_count,omitempty" url:"used_count,omitempty"` + IdleCount int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"` + PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"` + PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"` +} diff --git a/azure/models/LogSyncStatus.go b/azure/clusters/models/LogSyncStatus.go similarity index 97% rename from azure/models/LogSyncStatus.go rename to azure/clusters/models/LogSyncStatus.go index da42624..f2bb45e 100644 --- a/azure/models/LogSyncStatus.go +++ b/azure/clusters/models/LogSyncStatus.go @@ -1,6 +1,6 @@ -package models - -type LogSyncStatus struct { - LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"` - LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"` -} +package models + +type LogSyncStatus struct { + LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"` + LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"` +} diff --git a/azure/models/ParameterPair.go b/azure/clusters/models/ParameterPair.go similarity index 96% rename from azure/models/ParameterPair.go rename to azure/clusters/models/ParameterPair.go index be47579..6914560 100644 --- a/azure/models/ParameterPair.go +++ b/azure/clusters/models/ParameterPair.go @@ -1,6 +1,6 @@ -package models - -type ParameterPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string 
`json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ParameterPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/azure/models/SparkConfPair.go b/azure/clusters/models/SparkConfPair.go similarity index 96% rename from azure/models/SparkConfPair.go rename to azure/clusters/models/SparkConfPair.go index 40f96c4..71f81c5 100644 --- a/azure/models/SparkConfPair.go +++ b/azure/clusters/models/SparkConfPair.go @@ -1,6 +1,6 @@ -package models - -type SparkConfPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type SparkConfPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/azure/models/SparkNode.go b/azure/clusters/models/SparkNode.go similarity index 98% rename from azure/models/SparkNode.go rename to azure/clusters/models/SparkNode.go index 085226b..0e14863 100644 --- a/azure/models/SparkNode.go +++ b/azure/clusters/models/SparkNode.go @@ -1,10 +1,10 @@ -package models - -type SparkNode struct { - PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"` - PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"` - NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"` - InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"` - StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"` - HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"` -} +package models + +type SparkNode struct { + PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"` + PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"` + NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"` + InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"` + StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"` + HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"` +} diff --git a/azure/models/TerminationCode.go b/azure/clusters/models/TerminationCode.go similarity index 97% rename from azure/models/TerminationCode.go rename to azure/clusters/models/TerminationCode.go index ea47709..3f0a227 100644 --- a/azure/models/TerminationCode.go +++ b/azure/clusters/models/TerminationCode.go @@ -1,20 +1,20 @@ -package models - -type TerminationCode string - -const ( - TerminationCodeUserRequest = "USER_REQUEST" - TerminationCodeJobFinished = "JOB_FINISHED" - TerminationCodeInactivity = "INACTIVITY" - TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN" - TerminationCodeCommunicationLost = "COMMUNICATION_LOST" - TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE" - TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE" - TerminationCodeInvalidArgument = "INVALID_ARGUMENT" - TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE" - TerminationCodeInternalError = "INTERNAL_ERROR" - TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE" - TerminationCodeRequestRejected = "REQUEST_REJECTED" - TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE" - TerminationCodeTrialExpired = "TRIAL_EXPIRED" -) +package models + +type TerminationCode string + +const ( + TerminationCodeUserRequest = "USER_REQUEST" + 
TerminationCodeJobFinished = "JOB_FINISHED" + TerminationCodeInactivity = "INACTIVITY" + TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN" + TerminationCodeCommunicationLost = "COMMUNICATION_LOST" + TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE" + TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE" + TerminationCodeInvalidArgument = "INVALID_ARGUMENT" + TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE" + TerminationCodeInternalError = "INTERNAL_ERROR" + TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE" + TerminationCodeRequestRejected = "REQUEST_REJECTED" + TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE" + TerminationCodeTrialExpired = "TRIAL_EXPIRED" +) diff --git a/azure/models/TerminationParameter.go b/azure/clusters/models/TerminationParameter.go similarity index 97% rename from azure/models/TerminationParameter.go rename to azure/clusters/models/TerminationParameter.go index 4192478..4e70611 100644 --- a/azure/models/TerminationParameter.go +++ b/azure/clusters/models/TerminationParameter.go @@ -1,12 +1,12 @@ -package models - -type TerminationParameter string - -const ( - TerminationParameterUsername = "username" - TerminationParameterDatabricksErrorMessage = "databricks_error_message" - TerminationParameterInactivityDurationMin = "inactivity_duration_min" - TerminationParameterInstanceID = "instance_id" - TerminationParameterAzureErrorCode = "azure_error_code" - TerminationParameterAzureErrorMessage = "azure_error_message" -) +package models + +type TerminationParameter string + +const ( + TerminationParameterUsername = "username" + TerminationParameterDatabricksErrorMessage = "databricks_error_message" + TerminationParameterInactivityDurationMin = "inactivity_duration_min" + TerminationParameterInstanceID = "instance_id" + TerminationParameterAzureErrorCode = "azure_error_code" + TerminationParameterAzureErrorMessage = "azure_error_message" +) diff --git a/azure/models/TerminationReason.go b/azure/clusters/models/TerminationReason.go similarity index 97% rename from azure/models/TerminationReason.go rename to azure/clusters/models/TerminationReason.go index ce88778..307401f 100644 --- a/azure/models/TerminationReason.go +++ b/azure/clusters/models/TerminationReason.go @@ -1,6 +1,6 @@ -package models - -type TerminationReason struct { - Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"` - Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type TerminationReason struct { + Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"` + Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/azure/dbfs.go b/azure/dbfs.go index b8b4019..218ec0c 100644 --- a/azure/dbfs.go +++ b/azure/dbfs.go @@ -1,203 +1,203 @@ -package azure - -import ( - "encoding/base64" - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// DbfsAPI exposes the DBFS API -type DbfsAPI struct { - Client DBClient -} - -func (a DbfsAPI) init(client DBClient) DbfsAPI { - a.Client = client - return a -} - -// AddBlock appends a block of data to the stream specified by the input handle -func (a DbfsAPI) AddBlock(handle int64, data []byte) error { - data2 := struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` - Data string `json:"data,omitempty" url:"data,omitempty"` - }{ - handle, - base64.StdEncoding.EncodeToString(data), - } - _, err := 
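Review note on the enum-style types above (InstancePoolState, TerminationCode, TerminationParameter): the const blocks declare untyped string constants, so InstancePoolStateActive, for example, is an untyped string rather than an InstancePoolState. A possible tightening — not applied in this diff — pins the constants to the named type so the compiler rejects accidental mixing of the different code sets:

```go
// Variant declaration (sketch, not part of this change):
type InstancePoolState string

const (
	InstancePoolStateActive  InstancePoolState = "ACTIVE"
	InstancePoolStateDeleted InstancePoolState = "DELETED"
)
```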
a.Client.performQuery(http.MethodPost, "/dbfs/add-block", data2, nil) - return err -} - -// Close closes the stream specified by the input handle -func (a DbfsAPI) Close(handle int64) error { - data := struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` - }{ - handle, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", data, nil) - return err -} - -// DbfsCreateResponse is the response from Create -type DbfsCreateResponse struct { - Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` -} - -// Create opens a stream to write to a file and returns a handle to this stream -func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error) { - var createResponse DbfsCreateResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` - }{ - path, - overwrite, - } - resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil) - - if err != nil { - return createResponse, err - } - - err = json.Unmarshal(resp, &createResponse) - return createResponse, err -} - -// Delete deletes the file or directory (optionally recursively delete all files in the directory) -func (a DbfsAPI) Delete(path string, recursive bool) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` - }{ - path, - recursive, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", data, nil) - return err -} - -// GetStatus gets the file information of a file or directory -func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) { - var fileInfo models.FileInfo - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil) - - if err != nil { - return fileInfo, err - } - - err = json.Unmarshal(resp, &fileInfo) - return fileInfo, err -} - -// DbfsListResponse is a list of FileInfo as a response of List -type DbfsListResponse struct { - Files []models.FileInfo `json:"files,omitempty" url:"files,omitempty"` -} - -// List lists the contents of a directory, or details of the file -func (a DbfsAPI) List(path string) ([]models.FileInfo, error) { - var listResponse DbfsListResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil) - - if err != nil { - return listResponse.Files, err - } - - err = json.Unmarshal(resp, &listResponse) - return listResponse.Files, err -} - -// Mkdirs creates the given directory and necessary parent directories if they do not exist -func (a DbfsAPI) Mkdirs(path string) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - }{ - path, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", data, nil) - return err -} - -// Move moves a file from one location to another location within DBFS -func (a DbfsAPI) Move(sourcePath, destinationPath string) error { - data := struct { - SourcePath string `json:"source_path,omitempty" url:"source_path,omitempty"` - DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"` - }{ - sourcePath, - destinationPath, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/move", data, nil) - return err -} - -// Put uploads a file through the use of multipart 
form post -func (a DbfsAPI) Put(path string, contents []byte, overwrite bool) error { - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Contents string `json:"contents,omitempty" url:"contents,omitempty"` - Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` - }{ - path, - base64.StdEncoding.EncodeToString(contents), - overwrite, - } - _, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", data, nil) - return err -} - -// DbfsReadResponse is the response of reading a file -type DbfsReadResponse struct { - BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` - Data []byte `json:"data,omitempty" url:"data,omitempty"` -} - -// Read returns the contents of a file -func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, error) { - var readResponseBase64 struct { - BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` - Data string `json:"data,omitempty" url:"data,omitempty"` - } - var readResponse DbfsReadResponse - - data := struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` - Length int64 `json:"length,omitempty" url:"length,omitempty"` - }{ - path, - offset, - length, - } - resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil) - - if err != nil { - return readResponse, err - } - - err = json.Unmarshal(resp, &readResponseBase64) - if err != nil { - return readResponse, err - } - - readResponse.BytesRead = readResponseBase64.BytesRead - readResponse.Data, err = base64.StdEncoding.DecodeString(readResponseBase64.Data) - return readResponse, err -} +package azure + +import ( + "encoding/base64" + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/azure/models" +) + +// DbfsAPI exposes the DBFS API +type DbfsAPI struct { + Client DBClient +} + +func (a DbfsAPI) init(client DBClient) DbfsAPI { + a.Client = client + return a +} + +// AddBlock appends a block of data to the stream specified by the input handle +func (a DbfsAPI) AddBlock(handle int64, data []byte) error { + data2 := struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` + Data string `json:"data,omitempty" url:"data,omitempty"` + }{ + handle, + base64.StdEncoding.EncodeToString(data), + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/add-block", data2, nil) + return err +} + +// Close closes the stream specified by the input handle +func (a DbfsAPI) Close(handle int64) error { + data := struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` + }{ + handle, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", data, nil) + return err +} + +// DbfsCreateResponse is the response from Create +type DbfsCreateResponse struct { + Handle int64 `json:"handle,omitempty" url:"handle,omitempty"` +} + +// Create opens a stream to write to a file and returns a handle to this stream +func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error) { + var createResponse DbfsCreateResponse + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` + }{ + path, + overwrite, + } + resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil) + + if err != nil { + return createResponse, err + } + + err = json.Unmarshal(resp, &createResponse) + return createResponse, err +} + +// Delete deletes the file or directory (optionally 
recursively delete all files in the directory) +func (a DbfsAPI) Delete(path string, recursive bool) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"` + }{ + path, + recursive, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", data, nil) + return err +} + +// GetStatus gets the file information of a file or directory +func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) { + var fileInfo models.FileInfo + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil) + + if err != nil { + return fileInfo, err + } + + err = json.Unmarshal(resp, &fileInfo) + return fileInfo, err +} + +// DbfsListResponse is a list of FileInfo as a response of List +type DbfsListResponse struct { + Files []models.FileInfo `json:"files,omitempty" url:"files,omitempty"` +} + +// List lists the contents of a directory, or details of the file +func (a DbfsAPI) List(path string) ([]models.FileInfo, error) { + var listResponse DbfsListResponse + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil) + + if err != nil { + return listResponse.Files, err + } + + err = json.Unmarshal(resp, &listResponse) + return listResponse.Files, err +} + +// Mkdirs creates the given directory and necessary parent directories if they do not exist +func (a DbfsAPI) Mkdirs(path string) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + }{ + path, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", data, nil) + return err +} + +// Move moves a file from one location to another location within DBFS +func (a DbfsAPI) Move(sourcePath, destinationPath string) error { + data := struct { + SourcePath string `json:"source_path,omitempty" url:"source_path,omitempty"` + DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"` + }{ + sourcePath, + destinationPath, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/move", data, nil) + return err +} + +// Put uploads a file through the use of multipart form post +func (a DbfsAPI) Put(path string, contents []byte, overwrite bool) error { + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Contents string `json:"contents,omitempty" url:"contents,omitempty"` + Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"` + }{ + path, + base64.StdEncoding.EncodeToString(contents), + overwrite, + } + _, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", data, nil) + return err +} + +// DbfsReadResponse is the response of reading a file +type DbfsReadResponse struct { + BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` + Data []byte `json:"data,omitempty" url:"data,omitempty"` +} + +// Read returns the contents of a file +func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, error) { + var readResponseBase64 struct { + BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"` + Data string `json:"data,omitempty" url:"data,omitempty"` + } + var readResponse DbfsReadResponse + + data := struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + Offset int64 `json:"offset,omitempty" url:"offset,omitempty"` + Length int64 
`json:"length,omitempty" url:"length,omitempty"` + }{ + path, + offset, + length, + } + resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil) + + if err != nil { + return readResponse, err + } + + err = json.Unmarshal(resp, &readResponseBase64) + if err != nil { + return readResponse, err + } + + readResponse.BytesRead = readResponseBase64.BytesRead + readResponse.Data, err = base64.StdEncoding.DecodeString(readResponseBase64.Data) + return readResponse, err +} diff --git a/azure/deepcopy_generated.go b/azure/deepcopy_generated.go deleted file mode 100644 index 85fa15d..0000000 --- a/azure/deepcopy_generated.go +++ /dev/null @@ -1,375 +0,0 @@ -// +build !ignore_autogenerated - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package azure - -import ( - models "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClustersAPI) DeepCopyInto(out *ClustersAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersAPI. -func (in *ClustersAPI) DeepCopy() *ClustersAPI { - if in == nil { - return nil - } - out := new(ClustersAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -// func (in *clusters.EventsResp) DeepCopyInto(out *clusters.EventsResp) { -// *out = *in -// if in.Events != nil { -// in, out := &in.Events, &out.Events -// *out = make([]models.ClusterEvent, len(*in)) -// for i := range *in { -// (*in)[i].DeepCopyInto(&(*out)[i]) -// } -// } -// out.NextPage = in.NextPage -// return -// } - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersEventsResponse. -// func (in *ClustersEventsResponse) DeepCopy() *ClustersEventsResponse { -// if in == nil { -// return nil -// } -// out := new(ClustersEventsResponse) -// in.DeepCopyInto(out) -// return out -// } - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DBClient) DeepCopyInto(out *DBClient) { - *out = *in - in.Option.DeepCopyInto(&out.Option) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DBClient. -func (in *DBClient) DeepCopy() *DBClient { - if in == nil { - return nil - } - out := new(DBClient) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsAPI) DeepCopyInto(out *DbfsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsAPI. -func (in *DbfsAPI) DeepCopy() *DbfsAPI { - if in == nil { - return nil - } - out := new(DbfsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsCreateResponse) DeepCopyInto(out *DbfsCreateResponse) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsCreateResponse. 
-func (in *DbfsCreateResponse) DeepCopy() *DbfsCreateResponse { - if in == nil { - return nil - } - out := new(DbfsCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsListResponse) DeepCopyInto(out *DbfsListResponse) { - *out = *in - if in.Files != nil { - in, out := &in.Files, &out.Files - *out = make([]models.FileInfo, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsListResponse. -func (in *DbfsListResponse) DeepCopy() *DbfsListResponse { - if in == nil { - return nil - } - out := new(DbfsListResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsReadResponse) DeepCopyInto(out *DbfsReadResponse) { - *out = *in - if in.Data != nil { - in, out := &in.Data, &out.Data - *out = make([]byte, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsReadResponse. -func (in *DbfsReadResponse) DeepCopy() *DbfsReadResponse { - if in == nil { - return nil - } - out := new(DbfsReadResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GroupsAPI) DeepCopyInto(out *GroupsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsAPI. -func (in *GroupsAPI) DeepCopy() *GroupsAPI { - if in == nil { - return nil - } - out := new(GroupsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GroupsCreateResponse) DeepCopyInto(out *GroupsCreateResponse) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsCreateResponse. -func (in *GroupsCreateResponse) DeepCopy() *GroupsCreateResponse { - if in == nil { - return nil - } - out := new(GroupsCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstancePoolsAPI) DeepCopyInto(out *InstancePoolsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolsAPI. -func (in *InstancePoolsAPI) DeepCopy() *InstancePoolsAPI { - if in == nil { - return nil - } - out := new(InstancePoolsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobsAPI) DeepCopyInto(out *JobsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsAPI. -func (in *JobsAPI) DeepCopy() *JobsAPI { - if in == nil { - return nil - } - out := new(JobsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobsRunsGetOutputResponse) DeepCopyInto(out *JobsRunsGetOutputResponse) { - *out = *in - out.NotebookOutput = in.NotebookOutput - in.Metadata.DeepCopyInto(&out.Metadata) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsGetOutputResponse. -func (in *JobsRunsGetOutputResponse) DeepCopy() *JobsRunsGetOutputResponse { - if in == nil { - return nil - } - out := new(JobsRunsGetOutputResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobsRunsListResponse) DeepCopyInto(out *JobsRunsListResponse) { - *out = *in - if in.Runs != nil { - in, out := &in.Runs, &out.Runs - *out = make([]models.Run, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsListResponse. -func (in *JobsRunsListResponse) DeepCopy() *JobsRunsListResponse { - if in == nil { - return nil - } - out := new(JobsRunsListResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibrariesAPI) DeepCopyInto(out *LibrariesAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesAPI. -func (in *LibrariesAPI) DeepCopy() *LibrariesAPI { - if in == nil { - return nil - } - out := new(LibrariesAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibrariesClusterStatusResponse) DeepCopyInto(out *LibrariesClusterStatusResponse) { - *out = *in - if in.LibraryStatuses != nil { - in, out := &in.LibraryStatuses, &out.LibraryStatuses - *out = make([]models.LibraryFullStatus, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesClusterStatusResponse. -func (in *LibrariesClusterStatusResponse) DeepCopy() *LibrariesClusterStatusResponse { - if in == nil { - return nil - } - out := new(LibrariesClusterStatusResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScimAPI) DeepCopyInto(out *ScimAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScimAPI. -func (in *ScimAPI) DeepCopy() *ScimAPI { - if in == nil { - return nil - } - out := new(ScimAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SecretsAPI) DeepCopyInto(out *SecretsAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretsAPI. -func (in *SecretsAPI) DeepCopy() *SecretsAPI { - if in == nil { - return nil - } - out := new(SecretsAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TokenAPI) DeepCopyInto(out *TokenAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenAPI. -func (in *TokenAPI) DeepCopy() *TokenAPI { - if in == nil { - return nil - } - out := new(TokenAPI) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TokenCreateResponse) DeepCopyInto(out *TokenCreateResponse) { - *out = *in - out.TokenInfo = in.TokenInfo - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenCreateResponse. -func (in *TokenCreateResponse) DeepCopy() *TokenCreateResponse { - if in == nil { - return nil - } - out := new(TokenCreateResponse) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *WorkspaceAPI) DeepCopyInto(out *WorkspaceAPI) { - *out = *in - in.Client.DeepCopyInto(&out.Client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkspaceAPI. -func (in *WorkspaceAPI) DeepCopy() *WorkspaceAPI { - if in == nil { - return nil - } - out := new(WorkspaceAPI) - in.DeepCopyInto(out) - return out -} diff --git a/azure/doc.go b/azure/doc.go index f06cd54..6e031fe 100644 --- a/azure/doc.go +++ b/azure/doc.go @@ -1,2 +1,2 @@ -// +k8s:deepcopy-gen=package -package azure +// +k8s:deepcopy-gen=package +package azure diff --git a/azure/groups.go b/azure/groups.go index 2f6fca6..26b8473 100644 --- a/azure/groups.go +++ b/azure/groups.go @@ -1,139 +1,139 @@ -package azure - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// GroupsAPI exposes the Groups API -type GroupsAPI struct { - Client DBClient -} - -func (a GroupsAPI) init(client DBClient) GroupsAPI { - a.Client = client - return a -} - -// AddMember adds a user or group to a group -func (a GroupsAPI) AddMember(principalName models.PrincipalName, parentName string) error { - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - parentName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", data, nil) - return err -} - -// GroupsCreateResponse is a response with group name for Create -type GroupsCreateResponse struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` -} - -// Create creates a new group with the given name -func (a GroupsAPI) Create(groupName string) (GroupsCreateResponse, error) { - var createResponse GroupsCreateResponse - - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - resp, err := a.Client.performQuery(http.MethodPost, "/groups/create", data, nil) - if err != nil { - return createResponse, err - } - - err = json.Unmarshal(resp, &createResponse) - return createResponse, err -} - -// ListMembers returns all of the members of a particular group -func (a GroupsAPI) ListMembers(groupName string) ([]models.PrincipalName, error) { - var membersResponse struct { - Members []models.PrincipalName `json:"members,omitempty" url:"members,omitempty"` 
- } - - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil) - if err != nil { - return membersResponse.Members, err - } - - err = json.Unmarshal(resp, &membersResponse) - return membersResponse.Members, err -} - -// List returns all of the groups in an organization -func (a GroupsAPI) List() ([]string, error) { - var listResponse struct { - GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil) - if err != nil { - return listResponse.GroupNames, err - } - - err = json.Unmarshal(resp, &listResponse) - return listResponse.GroupNames, err -} - -// ListParents retrieves all groups in which a given user or group is a member -func (a GroupsAPI) ListParents(principalName models.PrincipalName) ([]string, error) { - var listParentsResponse struct { - GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"` - } - - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - } - resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil) - if err != nil { - return listParentsResponse.GroupNames, err - } - - err = json.Unmarshal(resp, &listParentsResponse) - return listParentsResponse.GroupNames, err -} - -// RemoveMember removes a user or group from a group -func (a GroupsAPI) RemoveMember(principalName models.PrincipalName, parentName string) error { - data := struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` - }{ - principalName.UserName, - principalName.GroupName, - parentName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", data, nil) - return err -} - -// Delete removes a group from this organization -func (a GroupsAPI) Delete(groupName string) error { - data := struct { - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` - }{ - groupName, - } - _, err := a.Client.performQuery(http.MethodPost, "/groups/delete", data, nil) - return err -} +package azure + +import ( + "encoding/json" + "net/http" + + "github.com/xinsnake/databricks-sdk-golang/azure/models" +) + +// GroupsAPI exposes the Groups API +type GroupsAPI struct { + Client DBClient +} + +func (a GroupsAPI) init(client DBClient) GroupsAPI { + a.Client = client + return a +} + +// AddMember adds a user or group to a group +func (a GroupsAPI) AddMember(principalName models.PrincipalName, parentName string) error { + data := struct { + UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` + ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"` + }{ + principalName.UserName, + principalName.GroupName, + parentName, + } + _, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", data, nil) + return err +} + +// GroupsCreateResponse is a response with group name for Create +type GroupsCreateResponse struct { + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` +} + +// Create creates a new 
group with the given name
+func (a GroupsAPI) Create(groupName string) (GroupsCreateResponse, error) {
+	var createResponse GroupsCreateResponse
+
+	data := struct {
+		GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+	}{
+		groupName,
+	}
+	resp, err := a.Client.performQuery(http.MethodPost, "/groups/create", data, nil)
+	if err != nil {
+		return createResponse, err
+	}
+
+	err = json.Unmarshal(resp, &createResponse)
+	return createResponse, err
+}
+
+// ListMembers returns all of the members of a particular group
+func (a GroupsAPI) ListMembers(groupName string) ([]models.PrincipalName, error) {
+	var membersResponse struct {
+		Members []models.PrincipalName `json:"members,omitempty" url:"members,omitempty"`
+	}
+
+	data := struct {
+		GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+	}{
+		groupName,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil)
+	if err != nil {
+		return membersResponse.Members, err
+	}
+
+	err = json.Unmarshal(resp, &membersResponse)
+	return membersResponse.Members, err
+}
+
+// List returns all of the groups in an organization
+func (a GroupsAPI) List() ([]string, error) {
+	var listResponse struct {
+		GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil)
+	if err != nil {
+		return listResponse.GroupNames, err
+	}
+
+	err = json.Unmarshal(resp, &listResponse)
+	return listResponse.GroupNames, err
+}
+
+// ListParents retrieves all groups in which a given user or group is a member
+func (a GroupsAPI) ListParents(principalName models.PrincipalName) ([]string, error) {
+	var listParentsResponse struct {
+		GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
+	}
+
+	data := struct {
+		UserName  string `json:"user_name,omitempty" url:"user_name,omitempty"`
+		GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+	}{
+		principalName.UserName,
+		principalName.GroupName,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-parents", data, nil)
+	if err != nil {
+		return listParentsResponse.GroupNames, err
+	}
+
+	err = json.Unmarshal(resp, &listParentsResponse)
+	return listParentsResponse.GroupNames, err
+}
+
+// RemoveMember removes a user or group from a group
+func (a GroupsAPI) RemoveMember(principalName models.PrincipalName, parentName string) error {
+	data := struct {
+		UserName   string `json:"user_name,omitempty" url:"user_name,omitempty"`
+		GroupName  string `json:"group_name,omitempty" url:"group_name,omitempty"`
+		ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
+	}{
+		principalName.UserName,
+		principalName.GroupName,
+		parentName,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", data, nil)
+	return err
+}
+
+// Delete removes a group from this organization
+func (a GroupsAPI) Delete(groupName string) error {
+	data := struct {
+		GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
+	}{
+		groupName,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/groups/delete", data, nil)
+	return err
+}
diff --git a/azure/instance_pools.go b/azure/instance_pools.go
index e2a63a3..0f279f5 100644
--- a/azure/instance_pools.go
+++ b/azure/instance_pools.go
@@ -1,11 +1,11 @@
-package azure
-
-// InstancePoolsAPI exposes the InstancePools API
-type InstancePoolsAPI struct {
-	Client DBClient
-}
-
-func (a InstancePoolsAPI)
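Note on the groups.go hunk above: the new-side ListParents has been corrected to call /groups/list-parents; the removed code reused the /groups/list-members path from ListMembers, so ListParents returned a group's members instead of its parent groups. A usage sketch against the corrected method (the user name is a placeholder, and a ready-made GroupsAPI is assumed, since client construction is not part of this diff):

```go
package main

import (
	"fmt"
	"log"

	"github.com/xinsnake/databricks-sdk-golang/azure"
	"github.com/xinsnake/databricks-sdk-golang/azure/models"
)

// printParents prints every group the given user belongs to.
func printParents(groups azure.GroupsAPI, userName string) {
	parents, err := groups.ListParents(models.PrincipalName{UserName: userName})
	if err != nil {
		log.Fatal(err)
	}
	for _, g := range parents {
		fmt.Println(g)
	}
}
```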
init(client DBClient) InstancePoolsAPI { - a.Client = client - return a -} +package azure + +// InstancePoolsAPI exposes the InstancePools API +type InstancePoolsAPI struct { + Client DBClient +} + +func (a InstancePoolsAPI) init(client DBClient) InstancePoolsAPI { + a.Client = client + return a +} diff --git a/azure/jobs.go b/azure/jobs.go index 5cffc96..a1c4c8f 100644 --- a/azure/jobs.go +++ b/azure/jobs.go @@ -1,252 +1,253 @@ -package azure - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// JobsAPI exposes Jobs API endpoints -type JobsAPI struct { - Client DBClient -} - -func (a JobsAPI) init(client DBClient) JobsAPI { - a.Client = client - return a -} - -// Create creates a new job -func (a JobsAPI) Create(jobSettings models.JobSettings) (models.Job, error) { - var job models.Job - - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", jobSettings, nil) - if err != nil { - return job, err - } - - err = json.Unmarshal(resp, &job) - return job, err -} - -// JobsListResponse is the response type returned by JobsList -type JobsListResponse = struct { - Jobs []models.Job `json:"jobs,omitempty" url:"jobs,omitempty"` -} - -// List lists all jobs -func (a JobsAPI) List() ([]models.Job, error) { - var jobsList JobsListResponse - - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil) - if err != nil { - return jobsList.Jobs, err - } - - err = json.Unmarshal(resp, &jobsList) - return jobsList.Jobs, err -} - -// Delete deletes a job by ID -func (a JobsAPI) Delete(jobID int64) error { - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - }{ - jobID, - } - _, err := a.Client.performQuery(http.MethodPost, "/jobs/delete", data, nil) - return err -} - -// Get gets a job by ID -func (a JobsAPI) Get(jobID int64) (models.Job, error) { - var job models.Job - - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - }{ - jobID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", data, nil) - if err != nil { - return job, err - } - - err = json.Unmarshal(resp, &job) - return job, err -} - -// Reset overwrites job settings -func (a JobsAPI) Reset(jobID int64, jobSettings models.JobSettings) error { - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - NewSettings models.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"` - }{ - jobID, - jobSettings, - } - _, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", data, nil) - return err -} - -// RunNow runs a job now and return the run_id of the triggered run -func (a JobsAPI) RunNow(jobID int64, runParameters models.RunParameters) (models.Run, error) { - var run models.Run - - data := struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - models.RunParameters - }{ - jobID, - runParameters, - } - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", data, nil) - if err != nil { - return run, err - } - - err = json.Unmarshal(resp, &run) - return run, err -} - -// RunsSubmit submit a one-time run -func (a JobsAPI) RunsSubmit(runName string, clusterSpec models.ClusterSpec, jobTask models.JobTask, timeoutSeconds int32) (models.Run, error) { - var run models.Run - - data := struct { - RunName string `json:"run_name,omitempty" url:"run_name,omitempty"` - models.ClusterSpec - models.JobTask - TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"` - }{ - runName, - 
clusterSpec, - jobTask, - timeoutSeconds, - } - resp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", data, nil) - if err != nil { - return run, err - } - - err = json.Unmarshal(resp, &run) - return run, err -} - -// JobsRunsListResponse is the response type returned by RunsList -type JobsRunsListResponse struct { - Runs []models.Run `json:"runs,omitempty" url:"runs,omitempty"` - HasMore bool `json:"has_more,omitempty" url:"has_more,omitempty"` -} - -// RunsList lists runs from most recently started to least -func (a JobsAPI) RunsList(activeOnly, completedOnly bool, jobID int64, offset, limit int32) (JobsRunsListResponse, error) { - var runlistResponse JobsRunsListResponse - - data := struct { - ActiveOnly bool `json:"active_only,omitempty" url:"active_only,omitempty"` - CompletedOnly bool `json:"completed_only,omitempty" url:"completed_only,omitempty"` - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - Offset int32 `json:"offset,omitempty" url:"offset,omitempty"` - Limit int32 `json:"limit,omitempty" url:"limit,omitempty"` - }{ - activeOnly, - completedOnly, - jobID, - offset, - limit, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", data, nil) - if err != nil { - return runlistResponse, err - } - - err = json.Unmarshal(resp, &runlistResponse) - return runlistResponse, err -} - -// RunsGet retrieve the metadata of a run -func (a JobsAPI) RunsGet(runID int64) (models.Run, error) { - var run models.Run - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", data, nil) - if err != nil { - return run, err - } - - err = json.Unmarshal(resp, &run) - return run, err -} - -// RunsExport exports and retrieve the job run task -func (a JobsAPI) RunsExport(runID int64) ([]models.ViewItem, error) { - var viewItemsView = struct { - Views []models.ViewItem `json:"views,omitempty" url:"views,omitempty"` - }{} - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", data, nil) - if err != nil { - return viewItemsView.Views, err - } - - err = json.Unmarshal(resp, &viewItemsView) - return viewItemsView.Views, err -} - -// RunsCancel cancels a run -func (a JobsAPI) RunsCancel(runID int64) error { - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - _, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", data, nil) - return err -} - -// JobsRunsGetOutputResponse is the output of the run -type JobsRunsGetOutputResponse struct { - NotebookOutput models.NotebookOutput `json:"notebook_output,omitempty" url:"notebook_output,omitempty"` - Error string `json:"error,omitempty" url:"error,omitempty"` - Metadata models.Run `json:"metadata,omitempty" url:"metadata,omitempty"` -} - -// RunsGetOutput retrieves the output of a run -func (a JobsAPI) RunsGetOutput(runID int64) (JobsRunsGetOutputResponse, error) { - var runsGetOutputResponse JobsRunsGetOutputResponse - - data := struct { - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - }{ - runID, - } - resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", data, nil) - if err != nil { - return runsGetOutputResponse, err - } - - err = json.Unmarshal(resp, &runsGetOutputResponse) - return runsGetOutputResponse, err -} - -// RunsDelete deletes a non-active run. 
Returns an error if the run is active.
-func (a JobsAPI) RunsDelete(runID int64) error {
-	data := struct {
-		RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
-	}{
-		runID,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/delete", data, nil)
-	return err
-}
+package azure
+
+import (
+	"encoding/json"
+	"net/http"
+
+	jobsModels "github.com/xinsnake/databricks-sdk-golang/azure/jobs/models"
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+)
+
+// JobsAPI exposes Jobs API endpoints
+type JobsAPI struct {
+	Client DBClient
+}
+
+func (a JobsAPI) init(client DBClient) JobsAPI {
+	a.Client = client
+	return a
+}
+
+// Create creates a new job
+func (a JobsAPI) Create(jobSettings jobsModels.JobSettings) (jobsModels.Job, error) {
+	var job jobsModels.Job
+
+	resp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", jobSettings, nil)
+	if err != nil {
+		return job, err
+	}
+
+	err = json.Unmarshal(resp, &job)
+	return job, err
+}
+
+// JobsListResponse is the response type returned by List
+type JobsListResponse = struct {
+	Jobs []jobsModels.Job `json:"jobs,omitempty" url:"jobs,omitempty"`
+}
+
+// List lists all jobs
+func (a JobsAPI) List() ([]jobsModels.Job, error) {
+	var jobsList JobsListResponse
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil)
+	if err != nil {
+		return jobsList.Jobs, err
+	}
+
+	err = json.Unmarshal(resp, &jobsList)
+	return jobsList.Jobs, err
+}
+
+// Delete deletes a job by ID
+func (a JobsAPI) Delete(jobID int64) error {
+	data := struct {
+		JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+	}{
+		jobID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/jobs/delete", data, nil)
+	return err
+}
+
+// Get gets a job by ID
+func (a JobsAPI) Get(jobID int64) (jobsModels.Job, error) {
+	var job jobsModels.Job
+
+	data := struct {
+		JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+	}{
+		jobID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", data, nil)
+	if err != nil {
+		return job, err
+	}
+
+	err = json.Unmarshal(resp, &job)
+	return job, err
+}
+
+// Reset overwrites job settings
+func (a JobsAPI) Reset(jobID int64, jobSettings jobsModels.JobSettings) error {
+	data := struct {
+		JobID       int64                  `json:"job_id,omitempty" url:"job_id,omitempty"`
+		NewSettings jobsModels.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
+	}{
+		jobID,
+		jobSettings,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", data, nil)
+	return err
+}
+
+// RunNow runs a job now and returns the run_id of the triggered run
+func (a JobsAPI) RunNow(jobID int64, runParameters jobsModels.RunParameters) (jobsModels.Run, error) {
+	var run jobsModels.Run
+
+	data := struct {
+		JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+		jobsModels.RunParameters
+	}{
+		jobID,
+		runParameters,
+	}
+	resp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", data, nil)
+	if err != nil {
+		return run, err
+	}
+
+	err = json.Unmarshal(resp, &run)
+	return run, err
+}
+
+// RunsSubmit submits a one-time run
+func (a JobsAPI) RunsSubmit(runName string, clusterSpec jobsModels.ClusterSpec, jobTask jobsModels.JobTask, timeoutSeconds int32) (jobsModels.Run, error) {
+	var run jobsModels.Run
+
+	data := struct {
+		RunName string `json:"run_name,omitempty" url:"run_name,omitempty"`
+		jobsModels.ClusterSpec
+		jobsModels.JobTask
+		TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
+	}{
+		runName,
+		clusterSpec,
+		jobTask,
+		timeoutSeconds,
+	}
+	resp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", data, nil)
+	if err != nil {
+		return run, err
+	}
+
+	err = json.Unmarshal(resp, &run)
+	return run, err
+}
+
+// JobsRunsListResponse is the response type returned by RunsList
+type JobsRunsListResponse struct {
+	Runs    []jobsModels.Run `json:"runs,omitempty" url:"runs,omitempty"`
+	HasMore bool             `json:"has_more,omitempty" url:"has_more,omitempty"`
+}
+
+// RunsList lists runs from most recently started to least
+func (a JobsAPI) RunsList(activeOnly, completedOnly bool, jobID int64, offset, limit int32) (JobsRunsListResponse, error) {
+	var runlistResponse JobsRunsListResponse
+
+	data := struct {
+		ActiveOnly    bool  `json:"active_only,omitempty" url:"active_only,omitempty"`
+		CompletedOnly bool  `json:"completed_only,omitempty" url:"completed_only,omitempty"`
+		JobID         int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
+		Offset        int32 `json:"offset,omitempty" url:"offset,omitempty"`
+		Limit         int32 `json:"limit,omitempty" url:"limit,omitempty"`
+	}{
+		activeOnly,
+		completedOnly,
+		jobID,
+		offset,
+		limit,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", data, nil)
+	if err != nil {
+		return runlistResponse, err
+	}
+
+	err = json.Unmarshal(resp, &runlistResponse)
+	return runlistResponse, err
+}
+
+// RunsGet retrieves the metadata of a run
+func (a JobsAPI) RunsGet(runID int64) (jobsModels.Run, error) {
+	var run jobsModels.Run
+
+	data := struct {
+		RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+	}{
+		runID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", data, nil)
+	if err != nil {
+		return run, err
+	}
+
+	err = json.Unmarshal(resp, &run)
+	return run, err
+}
+
+// RunsExport exports and retrieves the job run task
+func (a JobsAPI) RunsExport(runID int64) ([]models.ViewItem, error) {
+	var viewItemsView = struct {
+		Views []models.ViewItem `json:"views,omitempty" url:"views,omitempty"`
+	}{}
+
+	data := struct {
+		RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+	}{
+		runID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", data, nil)
+	if err != nil {
+		return viewItemsView.Views, err
+	}
+
+	err = json.Unmarshal(resp, &viewItemsView)
+	return viewItemsView.Views, err
+}
+
+// RunsCancel cancels a run
+func (a JobsAPI) RunsCancel(runID int64) error {
+	data := struct {
+		RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+	}{
+		runID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", data, nil)
+	return err
+}
+
+// JobsRunsGetOutputResponse is the output of the run
+type JobsRunsGetOutputResponse struct {
+	NotebookOutput models.NotebookOutput `json:"notebook_output,omitempty" url:"notebook_output,omitempty"`
+	Error          string                `json:"error,omitempty" url:"error,omitempty"`
+	Metadata       jobsModels.Run        `json:"metadata,omitempty" url:"metadata,omitempty"`
+}
+
+// RunsGetOutput retrieves the output of a run
+func (a JobsAPI) RunsGetOutput(runID int64) (JobsRunsGetOutputResponse, error) {
+	var runsGetOutputResponse JobsRunsGetOutputResponse
+
+	data := struct {
+		RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
+	}{
+		runID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", data, nil)
+	if err != nil {
+		return runsGetOutputResponse, err
+	}
+
+	err = json.Unmarshal(resp, &runsGetOutputResponse)
+	return runsGetOutputResponse, err
+}
+
+// RunsDelete deletes a non-active run. Returns an error if the run is active.
diff --git a/azure/models/ClusterInstance.go b/azure/jobs/models/ClusterInstance.go
similarity index 97%
rename from azure/models/ClusterInstance.go
rename to azure/jobs/models/ClusterInstance.go
index 7cea51a..c7cd031 100644
--- a/azure/models/ClusterInstance.go
+++ b/azure/jobs/models/ClusterInstance.go
@@ -1,6 +1,6 @@
-package models
-
-type ClusterInstance struct {
-	ClusterID      string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
-	SparkContextID string `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
-}
+package models
+
+type ClusterInstance struct {
+	ClusterID      string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	SparkContextID string `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
+}
diff --git a/azure/jobs/models/ClusterSpec.go b/azure/jobs/models/ClusterSpec.go
new file mode 100644
index 0000000..82a3759
--- /dev/null
+++ b/azure/jobs/models/ClusterSpec.go
@@ -0,0 +1,11 @@
+package models
+
+import (
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+	"github.com/xinsnake/databricks-sdk-golang/azure/clusters/httpmodels"
+)
+type ClusterSpec struct {
+	ExistingClusterID string                `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
+	NewCluster        *httpmodels.CreateReq `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
+	Libraries         []models.Library      `json:"libraries,omitempty" url:"libraries,omitempty"`
+}
diff --git a/azure/models/CronSchedule.go b/azure/jobs/models/CronSchedule.go
similarity index 97%
rename from azure/models/CronSchedule.go
rename to azure/jobs/models/CronSchedule.go
index 9617d25..c6795ab 100644
--- a/azure/models/CronSchedule.go
+++ b/azure/jobs/models/CronSchedule.go
@@ -1,6 +1,6 @@
-package models
-
-type CronSchedule struct {
-	QuartzCronExpression string `json:"quartz_cron_expression,omitempty" url:"quartz_cron_expression,omitempty"`
-	TimezoneID           string `json:"timezone_id,omitempty" url:"timezone_id,omitempty"`
-}
+package models
+
+type CronSchedule struct {
+	QuartzCronExpression string `json:"quartz_cron_expression,omitempty" url:"quartz_cron_expression,omitempty"`
+	TimezoneID           string `json:"timezone_id,omitempty" url:"timezone_id,omitempty"`
+}
diff --git a/azure/models/Job.go b/azure/jobs/models/Job.go
similarity index 97%
rename from azure/models/Job.go
rename to azure/jobs/models/Job.go
index 925249d..a3ac135 100644
--- a/azure/models/Job.go
+++ b/azure/jobs/models/Job.go
@@ -1,8 +1,8 @@
-package models
-
-type Job struct {
-	JobID           int64        `json:"job_id,omitempty" url:"job_id,omitempty"`
-	CreatorUserName string       `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
-	Settings        *JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
-	CreatedTime     int64        `json:"created_time,omitempty" url:"created_time,omitempty"`
-}
+package models
+
+type Job struct {
+	JobID           int64        `json:"job_id,omitempty" url:"job_id,omitempty"`
+	CreatorUserName string       `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
+	Settings        *JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
+	CreatedTime     int64        `json:"created_time,omitempty" url:"created_time,omitempty"`
+}
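The renames above relocate the job models from azure/models into the new azure/jobs/models package, and, as the JobSettings diff just below shows, NewCluster is retyped from the removed models.NewCluster to *httpmodels.CreateReq. A minimal migration sketch for downstream code follows; the jobsModels alias mirrors the one used in jobs.go, and the empty CreateReq literal is an assumption, since its fields live in clusters/httpmodels outside this diff.

package main

import (
	// Before this change, JobSettings was imported from:
	//   "github.com/xinsnake/databricks-sdk-golang/azure/models"
	// After it, the job models come from the new package:
	jobsModels "github.com/xinsnake/databricks-sdk-golang/azure/jobs/models"

	// NewCluster is no longer models.NewCluster (deleted later in this
	// diff); it is now the clusters request type:
	"github.com/xinsnake/databricks-sdk-golang/azure/clusters/httpmodels"
)

func main() {
	settings := jobsModels.JobSettings{
		Name:       "example-job",
		NewCluster: &httpmodels.CreateReq{}, // assumed plain struct; fields not shown in this diff
	}
	_ = settings
}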
diff --git a/azure/models/JobEmailNotifications.go b/azure/jobs/models/JobEmailNotifications.go
similarity index 98%
rename from azure/models/JobEmailNotifications.go
rename to azure/jobs/models/JobEmailNotifications.go
index 141eccc..5d79719 100644
--- a/azure/models/JobEmailNotifications.go
+++ b/azure/jobs/models/JobEmailNotifications.go
@@ -1,8 +1,8 @@
-package models
-
-type JobEmailNotifications struct {
-	OnStart               []string `json:"on_start,omitempty" url:"on_start,omitempty"`
-	OnSuccess             []string `json:"on_success,omitempty" url:"on_success,omitempty"`
-	OnFailure             []string `json:"on_failure,omitempty" url:"on_failure,omitempty"`
-	NoAlertForSkippedRuns bool     `json:"no_alert_for_skipped_runs,omitempty" url:"no_alert_for_skipped_runs,omitempty"`
-}
+package models
+
+type JobEmailNotifications struct {
+	OnStart               []string `json:"on_start,omitempty" url:"on_start,omitempty"`
+	OnSuccess             []string `json:"on_success,omitempty" url:"on_success,omitempty"`
+	OnFailure             []string `json:"on_failure,omitempty" url:"on_failure,omitempty"`
+	NoAlertForSkippedRuns bool     `json:"no_alert_for_skipped_runs,omitempty" url:"no_alert_for_skipped_runs,omitempty"`
+}
diff --git a/azure/models/JobSettings.go b/azure/jobs/models/JobSettings.go
similarity index 79%
rename from azure/models/JobSettings.go
rename to azure/jobs/models/JobSettings.go
index 10936dd..a6b5d08 100644
--- a/azure/models/JobSettings.go
+++ b/azure/jobs/models/JobSettings.go
@@ -1,19 +1,24 @@
-package models
-
-type JobSettings struct {
-	ExistingClusterID      string                 `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
-	NewCluster             *NewCluster            `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
-	NotebookTask           *NotebookTask          `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
-	SparkJarTask           *SparkJarTask          `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
-	SparkPythonTask        *SparkPythonTask       `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
-	SparkSubmitTask        *SparkSubmitTask       `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
-	Name                   string                 `json:"name,omitempty" url:"name,omitempty"`
-	Libraries              []Library              `json:"libraries,omitempty" url:"libraries,omitempty"`
-	EmailNotifications     *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"`
-	TimeoutSeconds         int32                  `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
-	MaxRetries             int32                  `json:"max_retries,omitempty" url:"max_retries,omitempty"`
-	MinRetryIntervalMillis int32                  `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
-	RetryOnTimeout         bool                   `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
-	Schedule               *CronSchedule          `json:"schedule,omitempty" url:"schedule,omitempty"`
-	MaxConcurrentRuns      int32                  `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
-}
+package models
+
+import (
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+	"github.com/xinsnake/databricks-sdk-golang/azure/clusters/httpmodels"
+)
+
+type JobSettings struct {
+	ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
+	NewCluster *httpmodels.CreateReq `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
+	NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
+	SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
+	SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
+	SparkSubmitTask
*SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` + Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"` + EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"` + TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"` + MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"` + MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"` + Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` + MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"` +} diff --git a/azure/models/JobTask.go b/azure/jobs/models/JobTask.go similarity index 98% rename from azure/models/JobTask.go rename to azure/jobs/models/JobTask.go index f75c890..3a24a03 100644 --- a/azure/models/JobTask.go +++ b/azure/jobs/models/JobTask.go @@ -1,8 +1,8 @@ -package models - -type JobTask struct { - NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` - SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` - SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` - SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` -} +package models + +type JobTask struct { + NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"` + SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"` + SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"` + SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"` +} diff --git a/azure/models/NotebookTask.go b/azure/jobs/models/NotebookTask.go similarity index 97% rename from azure/models/NotebookTask.go rename to azure/jobs/models/NotebookTask.go index 1776143..b839a0b 100644 --- a/azure/models/NotebookTask.go +++ b/azure/jobs/models/NotebookTask.go @@ -1,6 +1,6 @@ -package models - -type NotebookTask struct { - NotebookPath string `json:"notebook_path,omitempty" url:"notebook_path,omitempty"` - BaseParameters map[string]string `json:"base_parameters,omitempty" url:"base_parameters,omitempty"` -} +package models + +type NotebookTask struct { + NotebookPath string `json:"notebook_path,omitempty" url:"notebook_path,omitempty"` + BaseParameters map[string]string `json:"base_parameters,omitempty" url:"base_parameters,omitempty"` +} diff --git a/azure/models/Run.go b/azure/jobs/models/Run.go similarity index 98% rename from azure/models/Run.go rename to azure/jobs/models/Run.go index c38b3c1..4e328b2 100644 --- a/azure/models/Run.go +++ b/azure/jobs/models/Run.go @@ -1,23 +1,23 @@ -package models - -type Run struct { - JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` - RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` - CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` - NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"` - OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"` - 
State *RunState `json:"state,omitempty" url:"state,omitempty"` - Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` - Task *JobTask `json:"task,omitempty" url:"task,omitempty"` - ClusterSpec *ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"` - ClusterInstance *ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"` - OverridingParameters *RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"` - StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` - SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"` - ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"` - CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"` - Trigger *TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"` - RunName string `json:"run_name,omitempty" url:"run_name,omitempty"` - RunPageURL string `json:"run_page_url,omitempty" url:"run_page_url,omitempty"` - RunType string `json:"run_type,omitempty" url:"run_type,omitempty"` -} +package models + +type Run struct { + JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` + RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"` + CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"` + NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"` + OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"` + State *RunState `json:"state,omitempty" url:"state,omitempty"` + Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"` + Task *JobTask `json:"task,omitempty" url:"task,omitempty"` + ClusterSpec *ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"` + ClusterInstance *ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"` + OverridingParameters *RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"` + StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"` + SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"` + ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"` + CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"` + Trigger *TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"` + RunName string `json:"run_name,omitempty" url:"run_name,omitempty"` + RunPageURL string `json:"run_page_url,omitempty" url:"run_page_url,omitempty"` + RunType string `json:"run_type,omitempty" url:"run_type,omitempty"` +} diff --git a/azure/models/RunLifeCycleState.go b/azure/jobs/models/RunLifeCycleState.go similarity index 96% rename from azure/models/RunLifeCycleState.go rename to azure/jobs/models/RunLifeCycleState.go index 0208ce3..8877768 100644 --- a/azure/models/RunLifeCycleState.go +++ b/azure/jobs/models/RunLifeCycleState.go @@ -1,12 +1,12 @@ -package models - -type RunLifeCycleState string - -const ( - RunLifeCycleStatePending = "PENDING" - RunLifeCycleStateRunning = "RUNNING" - RunLifeCycleStateTerminating = "TERMINATING" - RunLifeCycleStateTerminated = "TERMINATED" - RunLifeCycleStateSkipped = "SKIPPED" - RunLifeCycleStateInternalError = "INTERNAL_ERROR" -) +package models + +type RunLifeCycleState string + +const ( + RunLifeCycleStatePending = "PENDING" + 
RunLifeCycleStateRunning = "RUNNING" + RunLifeCycleStateTerminating = "TERMINATING" + RunLifeCycleStateTerminated = "TERMINATED" + RunLifeCycleStateSkipped = "SKIPPED" + RunLifeCycleStateInternalError = "INTERNAL_ERROR" +) diff --git a/azure/models/RunParameters.go b/azure/jobs/models/RunParameters.go similarity index 98% rename from azure/models/RunParameters.go rename to azure/jobs/models/RunParameters.go index c3e5de4..4097e63 100644 --- a/azure/models/RunParameters.go +++ b/azure/jobs/models/RunParameters.go @@ -1,8 +1,8 @@ -package models - -type RunParameters struct { - JarParams []string `json:"jar_params,omitempty" url:"jar_params,omitempty"` - NotebookParams map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"` - PythonParams []string `json:"python_params,omitempty" url:"python_params,omitempty"` - SparkSubmitParams []string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"` -} +package models + +type RunParameters struct { + JarParams []string `json:"jar_params,omitempty" url:"jar_params,omitempty"` + NotebookParams map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"` + PythonParams []string `json:"python_params,omitempty" url:"python_params,omitempty"` + SparkSubmitParams []string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"` +} diff --git a/azure/models/RunResultState.go b/azure/jobs/models/RunResultState.go similarity index 95% rename from azure/models/RunResultState.go rename to azure/jobs/models/RunResultState.go index a03fe59..8b433ba 100644 --- a/azure/models/RunResultState.go +++ b/azure/jobs/models/RunResultState.go @@ -1,10 +1,10 @@ -package models - -type RunResultState string - -const ( - RunResultStateSuccess = "SUCCESS" - RunResultStateFailed = "FAILED" - RunResultStateTimedout = "TIMEDOUT" - RunResultStateCanceled = "CANCELED" -) +package models + +type RunResultState string + +const ( + RunResultStateSuccess = "SUCCESS" + RunResultStateFailed = "FAILED" + RunResultStateTimedout = "TIMEDOUT" + RunResultStateCanceled = "CANCELED" +) diff --git a/azure/models/RunState.go b/azure/jobs/models/RunState.go similarity index 97% rename from azure/models/RunState.go rename to azure/jobs/models/RunState.go index 9c663bf..725c84f 100644 --- a/azure/models/RunState.go +++ b/azure/jobs/models/RunState.go @@ -1,7 +1,7 @@ -package models - -type RunState struct { - LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"` - ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"` - StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` -} +package models + +type RunState struct { + LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"` + ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"` + StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"` +} diff --git a/azure/models/SparkJarTask.go b/azure/jobs/models/SparkJarTask.go similarity index 97% rename from azure/models/SparkJarTask.go rename to azure/jobs/models/SparkJarTask.go index c51437d..3ace25a 100644 --- a/azure/models/SparkJarTask.go +++ b/azure/jobs/models/SparkJarTask.go @@ -1,7 +1,7 @@ -package models - -type SparkJarTask struct { - JarURI string `json:"jar_uri,omitempty" url:"jar_uri,omitempty"` - MainClassName string `json:"main_class_name,omitempty" url:"main_class_name,omitempty"` - 
Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkJarTask struct { + JarURI string `json:"jar_uri,omitempty" url:"jar_uri,omitempty"` + MainClassName string `json:"main_class_name,omitempty" url:"main_class_name,omitempty"` + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/azure/models/SparkPythonTask.go b/azure/jobs/models/SparkPythonTask.go similarity index 97% rename from azure/models/SparkPythonTask.go rename to azure/jobs/models/SparkPythonTask.go index af5ebf5..7a0443d 100644 --- a/azure/models/SparkPythonTask.go +++ b/azure/jobs/models/SparkPythonTask.go @@ -1,6 +1,6 @@ -package models - -type SparkPythonTask struct { - PythonFile string `json:"python_file,omitempty" url:"python_file,omitempty"` - Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkPythonTask struct { + PythonFile string `json:"python_file,omitempty" url:"python_file,omitempty"` + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/azure/models/SparkSubmitTask.go b/azure/jobs/models/SparkSubmitTask.go similarity index 96% rename from azure/models/SparkSubmitTask.go rename to azure/jobs/models/SparkSubmitTask.go index a492b93..4f3d657 100644 --- a/azure/models/SparkSubmitTask.go +++ b/azure/jobs/models/SparkSubmitTask.go @@ -1,5 +1,5 @@ -package models - -type SparkSubmitTask struct { - Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` -} +package models + +type SparkSubmitTask struct { + Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"` +} diff --git a/azure/models/TriggerType.go b/azure/jobs/models/TriggerType.go similarity index 94% rename from azure/models/TriggerType.go rename to azure/jobs/models/TriggerType.go index dad32c9..c8d93b8 100644 --- a/azure/models/TriggerType.go +++ b/azure/jobs/models/TriggerType.go @@ -1,9 +1,9 @@ -package models - -type TriggerType string - -const ( - TriggerTypePeriodic = "PERIODIC" - TriggerTypeOneTime = "ONE_TIME" - TriggerTypeRetry = "RETRY" -) +package models + +type TriggerType string + +const ( + TriggerTypePeriodic = "PERIODIC" + TriggerTypeOneTime = "ONE_TIME" + TriggerTypeRetry = "RETRY" +) diff --git a/azure/libraries.go b/azure/libraries.go index d03fad1..a2345b0 100644 --- a/azure/libraries.go +++ b/azure/libraries.go @@ -1,83 +1,83 @@ -package azure - -import ( - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// LibrariesAPI exposes the Libraries API -type LibrariesAPI struct { - Client DBClient -} - -func (a LibrariesAPI) init(client DBClient) LibrariesAPI { - a.Client = client - return a -} - -// AllClusterStatuses gets the status of all libraries on all clusters -func (a LibrariesAPI) AllClusterStatuses() ([]models.ClusterLibraryStatuses, error) { - var allClusterStatusesResponse struct { - Statuses []models.ClusterLibraryStatuses `json:"statuses,omitempty" url:"statuses,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/libraries/all-cluster-statuses", nil, nil) - if err != nil { - return allClusterStatusesResponse.Statuses, err - } - - err = json.Unmarshal(resp, &allClusterStatusesResponse) - return allClusterStatusesResponse.Statuses, err -} - -// LibrariesClusterStatusResponse is a response from AllClusterStatuses -type LibrariesClusterStatusResponse struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - 
LibraryStatuses []models.LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"`
-}
-
-// ClusterStatus get the status of libraries on a cluster
-func (a LibrariesAPI) ClusterStatus(clusterID string) (LibrariesClusterStatusResponse, error) {
-	var clusterStatusResponse LibrariesClusterStatusResponse
-
-	data := struct {
-		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
-	}{
-		clusterID,
-	}
-	resp, err := a.Client.performQuery(http.MethodGet, "/libraries/cluster-status", data, nil)
-	if err != nil {
-		return clusterStatusResponse, err
-	}
-
-	err = json.Unmarshal(resp, &clusterStatusResponse)
-	return clusterStatusResponse, err
-}
-
-// Install installs libraries on a cluster
-func (a LibrariesAPI) Install(clusterID string, libraries []models.Library) error {
-	data := struct {
-		ClusterID string           `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
-		Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
-	}{
-		clusterID,
-		libraries,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/libraries/install", data, nil)
-	return err
-}
-
-// Uninstall sets libraries to be uninstalled on a cluster
-func (a LibrariesAPI) Uninstall(clusterID string, libraries []models.Library) error {
-	data := struct {
-		ClusterID string           `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
-		Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
-	}{
-		clusterID,
-		libraries,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/libraries/uninstall", data, nil)
-	return err
-}
+package azure
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+)
+
+// LibrariesAPI exposes the Libraries API
+type LibrariesAPI struct {
+	Client DBClient
+}
+
+func (a LibrariesAPI) init(client DBClient) LibrariesAPI {
+	a.Client = client
+	return a
+}
+
+// AllClusterStatuses gets the status of all libraries on all clusters
+func (a LibrariesAPI) AllClusterStatuses() ([]models.ClusterLibraryStatuses, error) {
+	var allClusterStatusesResponse struct {
+		Statuses []models.ClusterLibraryStatuses `json:"statuses,omitempty" url:"statuses,omitempty"`
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/libraries/all-cluster-statuses", nil, nil)
+	if err != nil {
+		return allClusterStatusesResponse.Statuses, err
+	}
+
+	err = json.Unmarshal(resp, &allClusterStatusesResponse)
+	return allClusterStatusesResponse.Statuses, err
+}
+
+// LibrariesClusterStatusResponse is the response type returned by ClusterStatus
+type LibrariesClusterStatusResponse struct {
+	ClusterID       string                     `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	LibraryStatuses []models.LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"`
+}
+
+// ClusterStatus gets the status of libraries on a cluster
+func (a LibrariesAPI) ClusterStatus(clusterID string) (LibrariesClusterStatusResponse, error) {
+	var clusterStatusResponse LibrariesClusterStatusResponse
+
+	data := struct {
+		ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+	}{
+		clusterID,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/libraries/cluster-status", data, nil)
+	if err != nil {
+		return clusterStatusResponse, err
+	}
+
+	err = json.Unmarshal(resp, &clusterStatusResponse)
+	return clusterStatusResponse, err
+}
+
+// Install installs libraries on a cluster
+func (a LibrariesAPI) Install(clusterID string, libraries []models.Library) error {
+	data := struct {
+		ClusterID string           `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+		Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
+	}{
+		clusterID,
+		libraries,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/libraries/install", data, nil)
+	return err
+}
+
+// Uninstall sets libraries to be uninstalled on a cluster
+func (a LibrariesAPI) Uninstall(clusterID string, libraries []models.Library) error {
+	data := struct {
+		ClusterID string           `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
+		Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
+	}{
+		clusterID,
+		libraries,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/libraries/uninstall", data, nil)
+	return err
+}
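Because JobSettings and ClusterSpec above now reference models.Library across package boundaries, the Libraries API that owns that type is worth a short usage sketch as well: request an install, then poll ClusterStatus, since installation is asynchronous. The client value and cluster ID are the same assumed placeholders as before.

package main

import (
	"fmt"
	"log"

	"github.com/xinsnake/databricks-sdk-golang/azure"
	"github.com/xinsnake/databricks-sdk-golang/azure/models"
)

func main() {
	var client azure.DBClient // assumption: initialised elsewhere, outside this diff

	libs := azure.LibrariesAPI{Client: client}

	// Request installation of a PyPI package on a running cluster (placeholder ID).
	err := libs.Install("0000-000000-example0", []models.Library{
		{Pypi: &models.PythonPyPiLibrary{Package: "requests==2.23.0"}},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Poll the per-cluster status; each library reports PENDING, INSTALLING,
	// INSTALLED, or FAILED, with Messages explaining any failure.
	status, err := libs.ClusterStatus("0000-000000-example0")
	if err != nil {
		log.Fatal(err)
	}
	for _, ls := range status.LibraryStatuses {
		if ls.Status != nil {
			fmt.Println(*ls.Status, ls.Messages)
		}
	}
}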
diff --git a/azure/models/AclItem.go b/azure/models/AclItem.go
index ff5c34a..2c015f9 100644
--- a/azure/models/AclItem.go
+++ b/azure/models/AclItem.go
@@ -1,6 +1,6 @@
-package models
-
-type AclItem struct {
-	Principal  string         `json:"principal,omitempty" url:"principal,omitempty"`
-	Permission *AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
-}
+package models
+
+type AclItem struct {
+	Principal  string         `json:"principal,omitempty" url:"principal,omitempty"`
+	Permission *AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
+}
diff --git a/azure/models/AclPermission.go b/azure/models/AclPermission.go
index b20f185..2f77ef1 100644
--- a/azure/models/AclPermission.go
+++ b/azure/models/AclPermission.go
@@ -1,9 +1,9 @@
-package models
-
-type AclPermission string
-
-const (
-	AclPermissionRead   = "READ"
-	AclPermissionWrite  = "WRITE"
-	AclPermissionManage = "MANAGE"
-)
+package models
+
+type AclPermission string
+
+const (
+	AclPermissionRead   = "READ"
+	AclPermissionWrite  = "WRITE"
+	AclPermissionManage = "MANAGE"
+)
diff --git a/azure/models/ClusterCloudProviderNodeInfo.go b/azure/models/ClusterCloudProviderNodeInfo.go
index 1508bb8..257a440 100644
--- a/azure/models/ClusterCloudProviderNodeInfo.go
+++ b/azure/models/ClusterCloudProviderNodeInfo.go
@@ -1,7 +1,7 @@
-package models
-
-type ClusterCloudProviderNodeInfo struct {
-	Status             *ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"`
-	AvailableCoreQuota int32                           `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"`
-	TotalCoreQuota     int32                           `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"`
-}
+package models
+
+type ClusterCloudProviderNodeInfo struct {
+	Status             *ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"`
+	AvailableCoreQuota int32                           `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"`
+	TotalCoreQuota     int32                           `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"`
+}
diff --git a/azure/models/ClusterCloudProviderNodeStatus.go b/azure/models/ClusterCloudProviderNodeStatus.go
index 3d40d57..1d23b56 100644
--- a/azure/models/ClusterCloudProviderNodeStatus.go
+++ b/azure/models/ClusterCloudProviderNodeStatus.go
@@ -1,8 +1,8 @@
-package models
-
-type ClusterCloudProviderNodeStatus string
-
-const (
-	ClusterCloudProviderNodeStatusNotEnabledOnSubscription = "NotEnabledOnSubscription"
-	ClusterCloudProviderNodeStatusNotAvailableInRegion     = "NotAvailableInRegion"
-)
+package models
+
+type ClusterCloudProviderNodeStatus string
+
+const (
+	ClusterCloudProviderNodeStatusNotEnabledOnSubscription = "NotEnabledOnSubscription"
+	ClusterCloudProviderNodeStatusNotAvailableInRegion     = "NotAvailableInRegion"
+)
diff --git
a/azure/models/ClusterLibraryStatuses.go b/azure/models/ClusterLibraryStatuses.go index 69456d0..6392e00 100644 --- a/azure/models/ClusterLibraryStatuses.go +++ b/azure/models/ClusterLibraryStatuses.go @@ -1,6 +1,6 @@ -package models - -type ClusterLibraryStatuses struct { - ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` - LibraryStatuses []LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"` -} +package models + +type ClusterLibraryStatuses struct { + ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"` + LibraryStatuses []LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"` +} diff --git a/azure/models/ClusterSpec.go b/azure/models/ClusterSpec.go deleted file mode 100644 index 395f56c..0000000 --- a/azure/models/ClusterSpec.go +++ /dev/null @@ -1,7 +0,0 @@ -package models - -type ClusterSpec struct { - ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"` - NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"` - Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"` -} diff --git a/azure/models/ExportFormat.go b/azure/models/ExportFormat.go index af811c0..493decf 100644 --- a/azure/models/ExportFormat.go +++ b/azure/models/ExportFormat.go @@ -1,10 +1,10 @@ -package models - -type ExportFormat string - -const ( - ExportFormatSource = "SOURCE" - ExportFormatHtml = "HTML" - ExportFormatJupyter = "JUPYTER" - ExportFormatDbc = "DBC" -) +package models + +type ExportFormat string + +const ( + ExportFormatSource = "SOURCE" + ExportFormatHtml = "HTML" + ExportFormatJupyter = "JUPYTER" + ExportFormatDbc = "DBC" +) diff --git a/azure/models/FileInfo.go b/azure/models/FileInfo.go index 26d33fa..2875616 100644 --- a/azure/models/FileInfo.go +++ b/azure/models/FileInfo.go @@ -1,7 +1,7 @@ -package models - -type FileInfo struct { - Path string `json:"path,omitempty" url:"path,omitempty"` - IsDir bool `json:"is_dir,omitempty" url:"is_dir,omitempty"` - FileSize int64 `json:"file_size,omitempty" url:"file_size,omitempty"` -} +package models + +type FileInfo struct { + Path string `json:"path,omitempty" url:"path,omitempty"` + IsDir bool `json:"is_dir,omitempty" url:"is_dir,omitempty"` + FileSize int64 `json:"file_size,omitempty" url:"file_size,omitempty"` +} diff --git a/azure/models/Language.go b/azure/models/Language.go index 7f1b9d5..e0a9561 100644 --- a/azure/models/Language.go +++ b/azure/models/Language.go @@ -1,10 +1,10 @@ -package models - -type Language string - -const ( - LanguageScala = "SCALA" - LanguagePython = "PYTHON" - LanguageSQL = "SQL" - LanguageR = "R" -) +package models + +type Language string + +const ( + LanguageScala = "SCALA" + LanguagePython = "PYTHON" + LanguageSQL = "SQL" + LanguageR = "R" +) diff --git a/azure/models/Library.go b/azure/models/Library.go index 0cb0800..34f8a8f 100644 --- a/azure/models/Library.go +++ b/azure/models/Library.go @@ -1,10 +1,10 @@ -package models - -type Library struct { - Jar string `json:"jar,omitempty" url:"jar,omitempty"` - Egg string `json:"egg,omitempty" url:"egg,omitempty"` - Whl string `json:"whl,omitempty" url:"whl,omitempty"` - Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"` - Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"` - Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"` -} +package models + +type Library struct { + Jar string `json:"jar,omitempty" url:"jar,omitempty"` + Egg 
string `json:"egg,omitempty" url:"egg,omitempty"` + Whl string `json:"whl,omitempty" url:"whl,omitempty"` + Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"` + Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"` + Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"` +} diff --git a/azure/models/LibraryFullStatus.go b/azure/models/LibraryFullStatus.go index 4edca20..5f2c97b 100644 --- a/azure/models/LibraryFullStatus.go +++ b/azure/models/LibraryFullStatus.go @@ -1,8 +1,8 @@ -package models - -type LibraryFullStatus struct { - Library *Library `json:"library,omitempty" url:"library,omitempty"` - Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"` - Messages []string `json:"messages,omitempty" url:"messages,omitempty"` - IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"` -} +package models + +type LibraryFullStatus struct { + Library *Library `json:"library,omitempty" url:"library,omitempty"` + Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"` + Messages []string `json:"messages,omitempty" url:"messages,omitempty"` + IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"` +} diff --git a/azure/models/LibraryInstallStatus.go b/azure/models/LibraryInstallStatus.go index bd84e61..2accf5a 100644 --- a/azure/models/LibraryInstallStatus.go +++ b/azure/models/LibraryInstallStatus.go @@ -1,12 +1,12 @@ -package models - -type LibraryInstallStatus string - -const ( - LibraryInstallStatusPending = "PENDING" - LibraryInstallStatusResolving = "RESOLVING" - LibraryInstallStatusInstalling = "INSTALLING" - LibraryInstallStatusInstalled = "INSTALLED" - LibraryInstallStatusFailed = "FAILED" - LibraryInstallStatusUninstallOnRestart = "UNINSTALL_ON_RESTART" -) +package models + +type LibraryInstallStatus string + +const ( + LibraryInstallStatusPending = "PENDING" + LibraryInstallStatusResolving = "RESOLVING" + LibraryInstallStatusInstalling = "INSTALLING" + LibraryInstallStatusInstalled = "INSTALLED" + LibraryInstallStatusFailed = "FAILED" + LibraryInstallStatusUninstallOnRestart = "UNINSTALL_ON_RESTART" +) diff --git a/azure/models/ListOrder.go b/azure/models/ListOrder.go index 831ddd9..3e9c17c 100644 --- a/azure/models/ListOrder.go +++ b/azure/models/ListOrder.go @@ -1,8 +1,8 @@ -package models - -type ListOrder string - -const ( - ListOrderDesc = "DESC" - ListOrderAsc = "ASC" -) +package models + +type ListOrder string + +const ( + ListOrderDesc = "DESC" + ListOrderAsc = "ASC" +) diff --git a/azure/models/MavenLibrary.go b/azure/models/MavenLibrary.go index 7f5e900..0270a8a 100644 --- a/azure/models/MavenLibrary.go +++ b/azure/models/MavenLibrary.go @@ -1,7 +1,7 @@ -package models - -type MavenLibrary struct { - Coordinates string `json:"coordinates,omitempty" url:"coordinates,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` - Exclusions []string `json:"exclusions,omitempty" url:"exclusions,omitempty"` -} +package models + +type MavenLibrary struct { + Coordinates string `json:"coordinates,omitempty" url:"coordinates,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` + Exclusions []string `json:"exclusions,omitempty" url:"exclusions,omitempty"` +} diff --git a/azure/models/NewCluster.go b/azure/models/NewCluster.go deleted file mode 100644 index 5410026..0000000 --- a/azure/models/NewCluster.go +++ /dev/null @@ -1,20 +0,0 @@ -package 
models - -type NewCluster struct { - NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"` - Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"` - ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"` - SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"` - SparkConf map[string]string `json:"spark_conf,omitempty" url:"spark_conf,omitempty"` - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"` - CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"` - ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"` - InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"` - DockerImage DockerImage `json:"docker_image,omitempty" url:"docker_image,omitempty"` - SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"` - EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"` - AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"` - InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"` - IdempotencyToken string `json:"idempotency_token,omitempty" url:"idempotency_token,omitempty"` -} diff --git a/azure/models/NodeType.go b/azure/models/NodeType.go deleted file mode 100644 index 7b49896..0000000 --- a/azure/models/NodeType.go +++ /dev/null @@ -1,11 +0,0 @@ -package models - -type NodeType struct { - NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"` - MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"` - NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"` - Description string `json:"description,omitempty" url:"description,omitempty"` - InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"` - IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"` - NodeInfo *ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"` -} diff --git a/azure/models/NotebookOutput.go b/azure/models/NotebookOutput.go index 78628a6..62c4dab 100644 --- a/azure/models/NotebookOutput.go +++ b/azure/models/NotebookOutput.go @@ -1,6 +1,6 @@ -package models - -type NotebookOutput struct { - Result string `json:"result,omitempty" url:"result,omitempty"` - Truncated bool `json:"truncated,omitempty" url:"truncated,omitempty"` -} +package models + +type NotebookOutput struct { + Result string `json:"result,omitempty" url:"result,omitempty"` + Truncated bool `json:"truncated,omitempty" url:"truncated,omitempty"` +} diff --git a/azure/models/ObjectInfo.go b/azure/models/ObjectInfo.go index 5033f11..cee81f2 100644 --- a/azure/models/ObjectInfo.go +++ b/azure/models/ObjectInfo.go @@ -1,7 +1,7 @@ -package models - -type ObjectInfo struct { - ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"` - Path string `json:"path,omitempty" url:"path,omitempty"` - Language *Language `json:"language,omitempty" url:"language,omitempty"` -} +package models + +type ObjectInfo struct { + ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"` + Path string `json:"path,omitempty" url:"path,omitempty"` + Language *Language `json:"language,omitempty" 
url:"language,omitempty"` +} diff --git a/azure/models/ObjectType.go b/azure/models/ObjectType.go index 4e3240d..03287cf 100644 --- a/azure/models/ObjectType.go +++ b/azure/models/ObjectType.go @@ -1,9 +1,9 @@ -package models - -type ObjectType string - -const ( - ObjectTypeNotebook = "NOTEBOOK" - ObjectTypeDirectory = "DIRECTORY" - ObjectTypeLibrary = "LIBRARY" -) +package models + +type ObjectType string + +const ( + ObjectTypeNotebook = "NOTEBOOK" + ObjectTypeDirectory = "DIRECTORY" + ObjectTypeLibrary = "LIBRARY" +) diff --git a/azure/models/ParamPair.go b/azure/models/ParamPair.go index c023169..029f8ab 100644 --- a/azure/models/ParamPair.go +++ b/azure/models/ParamPair.go @@ -1,6 +1,6 @@ -package models - -type ParamPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type ParamPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/azure/models/PrincipalName.go b/azure/models/PrincipalName.go index 9b5346e..c1d5023 100644 --- a/azure/models/PrincipalName.go +++ b/azure/models/PrincipalName.go @@ -1,6 +1,6 @@ -package models - -type PrincipalName struct { - UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` - GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` -} +package models + +type PrincipalName struct { + UserName string `json:"user_name,omitempty" url:"user_name,omitempty"` + GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"` +} diff --git a/azure/models/PublicTokenInfo.go b/azure/models/PublicTokenInfo.go index 9b62146..3d29372 100644 --- a/azure/models/PublicTokenInfo.go +++ b/azure/models/PublicTokenInfo.go @@ -1,8 +1,8 @@ -package models - -type PublicTokenInfo struct { - TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` - CreationTime int64 `json:"creation_time,omitempty" url:"creation_time,omitempty"` - ExpiryTime int64 `json:"expiry_time,omitempty" url:"expiry_time,omitempty"` - Comment string `json:"comment,omitempty" url:"comment,omitempty"` -} +package models + +type PublicTokenInfo struct { + TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"` + CreationTime int64 `json:"creation_time,omitempty" url:"creation_time,omitempty"` + ExpiryTime int64 `json:"expiry_time,omitempty" url:"expiry_time,omitempty"` + Comment string `json:"comment,omitempty" url:"comment,omitempty"` +} diff --git a/azure/models/PythonPyPiLibrary.go b/azure/models/PythonPyPiLibrary.go index 0ade9b0..eacc27c 100644 --- a/azure/models/PythonPyPiLibrary.go +++ b/azure/models/PythonPyPiLibrary.go @@ -1,6 +1,6 @@ -package models - -type PythonPyPiLibrary struct { - Package string `json:"package,omitempty" url:"package,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` -} +package models + +type PythonPyPiLibrary struct { + Package string `json:"package,omitempty" url:"package,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` +} diff --git a/azure/models/RCranLibrary.go b/azure/models/RCranLibrary.go index e1ded03..f1c90ca 100644 --- a/azure/models/RCranLibrary.go +++ b/azure/models/RCranLibrary.go @@ -1,6 +1,6 @@ -package models - -type RCranLibrary struct { - Package string `json:"package,omitempty" url:"package,omitempty"` - Repo string `json:"repo,omitempty" url:"repo,omitempty"` -} +package models + +type RCranLibrary struct { + Package string `json:"package,omitempty" 
url:"package,omitempty"` + Repo string `json:"repo,omitempty" url:"repo,omitempty"` +} diff --git a/azure/models/ResizeCause.go b/azure/models/ResizeCause.go index a83ab2f..0370f8b 100644 --- a/azure/models/ResizeCause.go +++ b/azure/models/ResizeCause.go @@ -1,9 +1,9 @@ -package models - -type ResizeCause string - -const ( - ResizeCauseAutoscale = "AUTOSCALE" - ResizeCauseUserRequest = "USER_REQUEST" - ResizeCauseAutorecovery = "AUTORECOVERY" -) +package models + +type ResizeCause string + +const ( + ResizeCauseAutoscale = "AUTOSCALE" + ResizeCauseUserRequest = "USER_REQUEST" + ResizeCauseAutorecovery = "AUTORECOVERY" +) diff --git a/azure/models/ScopeBackendType.go b/azure/models/ScopeBackendType.go index d3f4afd..5b6a2d4 100644 --- a/azure/models/ScopeBackendType.go +++ b/azure/models/ScopeBackendType.go @@ -1,8 +1,8 @@ -package models - -type ScopeBackendType string - -const ( - ScopeBackendTypeDatabricks = "DATABRICKS" - ScopeBackendTypeAzureKeyvault = "AZURE_KEYVAULT" -) +package models + +type ScopeBackendType string + +const ( + ScopeBackendTypeDatabricks = "DATABRICKS" + ScopeBackendTypeAzureKeyvault = "AZURE_KEYVAULT" +) diff --git a/azure/models/SecretMetadata.go b/azure/models/SecretMetadata.go index b1f8a58..4503bfa 100644 --- a/azure/models/SecretMetadata.go +++ b/azure/models/SecretMetadata.go @@ -1,6 +1,6 @@ -package models - -type SecretMetadata struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty" url:"last_updated_timestamp,omitempty"` -} +package models + +type SecretMetadata struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty" url:"last_updated_timestamp,omitempty"` +} diff --git a/azure/models/SecretScope.go b/azure/models/SecretScope.go index c83c4e1..380f54c 100644 --- a/azure/models/SecretScope.go +++ b/azure/models/SecretScope.go @@ -1,6 +1,6 @@ -package models - -type SecretScope struct { - Name string `json:"name,omitempty" url:"name,omitempty"` - BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"` -} +package models + +type SecretScope struct { + Name string `json:"name,omitempty" url:"name,omitempty"` + BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"` +} diff --git a/azure/models/SparkEnvPair.go b/azure/models/SparkEnvPair.go index aa9b3dd..6b70c7f 100644 --- a/azure/models/SparkEnvPair.go +++ b/azure/models/SparkEnvPair.go @@ -1,6 +1,6 @@ -package models - -type SparkEnvPair struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Value string `json:"value,omitempty" url:"value,omitempty"` -} +package models + +type SparkEnvPair struct { + Key string `json:"key,omitempty" url:"key,omitempty"` + Value string `json:"value,omitempty" url:"value,omitempty"` +} diff --git a/azure/models/SparkVersion.go b/azure/models/SparkVersion.go deleted file mode 100644 index 9b22893..0000000 --- a/azure/models/SparkVersion.go +++ /dev/null @@ -1,6 +0,0 @@ -package models - -type SparkVersion struct { - Key string `json:"key,omitempty" url:"key,omitempty"` - Name string `json:"name,omitempty" url:"name,omitempty"` -} diff --git a/azure/models/ViewItem.go b/azure/models/ViewItem.go index 58ae2a4..7296d0e 100644 --- a/azure/models/ViewItem.go +++ b/azure/models/ViewItem.go @@ -1,7 +1,7 @@ -package models - -type ViewItem struct { - Content string `json:"content,omitempty" url:"content,omitempty"` - Name string 
`json:"name,omitempty" url:"name,omitempty"` - Type *ViewType `json:"type,omitempty" url:"type,omitempty"` -} +package models + +type ViewItem struct { + Content string `json:"content,omitempty" url:"content,omitempty"` + Name string `json:"name,omitempty" url:"name,omitempty"` + Type *ViewType `json:"type,omitempty" url:"type,omitempty"` +} diff --git a/azure/models/ViewType.go b/azure/models/ViewType.go index c3e0473..b936383 100644 --- a/azure/models/ViewType.go +++ b/azure/models/ViewType.go @@ -1,8 +1,8 @@ -package models - -type ViewType string - -const ( - ViewTypeNotebook = "NOTEBOOK" - ViewTypeDashboard = "DASHBOARD" -) +package models + +type ViewType string + +const ( + ViewTypeNotebook = "NOTEBOOK" + ViewTypeDashboard = "DASHBOARD" +) diff --git a/azure/models/ViewsToExport.go b/azure/models/ViewsToExport.go index 53c504d..7ed0578 100644 --- a/azure/models/ViewsToExport.go +++ b/azure/models/ViewsToExport.go @@ -1,9 +1,9 @@ -package models - -type ViewsToExport string - -const ( - ViewsToExportCode = "CODE" - ViewsToExportDashboards = "DASHBOARDS" - ViewsToExportAll = "ALL" -) +package models + +type ViewsToExport string + +const ( + ViewsToExportCode = "CODE" + ViewsToExportDashboards = "DASHBOARDS" + ViewsToExportAll = "ALL" +) diff --git a/azure/models/deepcopy_generated.go b/azure/models/deepcopy_generated.go deleted file mode 100644 index 5521445..0000000 --- a/azure/models/deepcopy_generated.go +++ /dev/null @@ -1,1342 +0,0 @@ -// +build !ignore_autogenerated - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package models - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AclItem) DeepCopyInto(out *AclItem) { - *out = *in - if in.Permission != nil { - in, out := &in.Permission, &out.Permission - *out = new(AclPermission) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AclItem. -func (in *AclItem) DeepCopy() *AclItem { - if in == nil { - return nil - } - out := new(AclItem) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AutoScale) DeepCopyInto(out *AutoScale) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoScale. -func (in *AutoScale) DeepCopy() *AutoScale { - if in == nil { - return nil - } - out := new(AutoScale) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ClusterAttributes) DeepCopyInto(out *ClusterAttributes) { - *out = *in - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = new(SparkConfPair) - **out = **in - } - if in.SSHPublicKeys != nil { - in, out := &in.SSHPublicKeys, &out.SSHPublicKeys - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ClusterSource != nil { - in, out := &in.ClusterSource, &out.ClusterSource - *out = new(ClusterSource) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterAttributes. -func (in *ClusterAttributes) DeepCopy() *ClusterAttributes { - if in == nil { - return nil - } - out := new(ClusterAttributes) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterCloudProviderNodeInfo) DeepCopyInto(out *ClusterCloudProviderNodeInfo) { - *out = *in - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(ClusterCloudProviderNodeStatus) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterCloudProviderNodeInfo. -func (in *ClusterCloudProviderNodeInfo) DeepCopy() *ClusterCloudProviderNodeInfo { - if in == nil { - return nil - } - out := new(ClusterCloudProviderNodeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterEvent) DeepCopyInto(out *ClusterEvent) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(ClusterEventType) - **out = **in - } - if in.Details != nil { - in, out := &in.Details, &out.Details - *out = new(EventDetails) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterEvent. -func (in *ClusterEvent) DeepCopy() *ClusterEvent { - if in == nil { - return nil - } - out := new(ClusterEvent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ClusterInfo) DeepCopyInto(out *ClusterInfo) { - *out = *in - if in.AutoScale != nil { - in, out := &in.AutoScale, &out.AutoScale - *out = new(AutoScale) - **out = **in - } - if in.Driver != nil { - in, out := &in.Driver, &out.Driver - *out = new(SparkNode) - **out = **in - } - if in.Executors != nil { - in, out := &in.Executors, &out.Executors - *out = make([]SparkNode, len(*in)) - copy(*out, *in) - } - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = new(SparkConfPair) - **out = **in - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(ClusterState) - **out = **in - } - if in.DefaultTags != nil { - in, out := &in.DefaultTags, &out.DefaultTags - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ClusterLogStatus != nil { - in, out := &in.ClusterLogStatus, &out.ClusterLogStatus - *out = new(LogSyncStatus) - **out = **in - } - if in.TerminationReason != nil { - in, out := &in.TerminationReason, &out.TerminationReason - *out = new(TerminationReason) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInfo. -func (in *ClusterInfo) DeepCopy() *ClusterInfo { - if in == nil { - return nil - } - out := new(ClusterInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterInstance) DeepCopyInto(out *ClusterInstance) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInstance. -func (in *ClusterInstance) DeepCopy() *ClusterInstance { - if in == nil { - return nil - } - out := new(ClusterInstance) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterLibraryStatuses) DeepCopyInto(out *ClusterLibraryStatuses) { - *out = *in - if in.LibraryStatuses != nil { - in, out := &in.LibraryStatuses, &out.LibraryStatuses - *out = make([]LibraryFullStatus, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLibraryStatuses. -func (in *ClusterLibraryStatuses) DeepCopy() *ClusterLibraryStatuses { - if in == nil { - return nil - } - out := new(ClusterLibraryStatuses) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterLogConf) DeepCopyInto(out *ClusterLogConf) { - *out = *in - if in.Dbfs != nil { - in, out := &in.Dbfs, &out.Dbfs - *out = new(DbfsStorageInfo) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterLogConf. 
-func (in *ClusterLogConf) DeepCopy() *ClusterLogConf { - if in == nil { - return nil - } - out := new(ClusterLogConf) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterSize) DeepCopyInto(out *ClusterSize) { - *out = *in - if in.Autoscale != nil { - in, out := &in.Autoscale, &out.Autoscale - *out = new(AutoScale) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSize. -func (in *ClusterSize) DeepCopy() *ClusterSize { - if in == nil { - return nil - } - out := new(ClusterSize) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec) { - *out = *in - if in.NewCluster != nil { - in, out := &in.NewCluster, &out.NewCluster - *out = new(NewCluster) - (*in).DeepCopyInto(*out) - } - if in.Libraries != nil { - in, out := &in.Libraries, &out.Libraries - *out = make([]Library, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec. -func (in *ClusterSpec) DeepCopy() *ClusterSpec { - if in == nil { - return nil - } - out := new(ClusterSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ClusterTag) DeepCopyInto(out *ClusterTag) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterTag. -func (in *ClusterTag) DeepCopy() *ClusterTag { - if in == nil { - return nil - } - out := new(ClusterTag) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CronSchedule) DeepCopyInto(out *CronSchedule) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CronSchedule. -func (in *CronSchedule) DeepCopy() *CronSchedule { - if in == nil { - return nil - } - out := new(CronSchedule) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DbfsStorageInfo) DeepCopyInto(out *DbfsStorageInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsStorageInfo. -func (in *DbfsStorageInfo) DeepCopy() *DbfsStorageInfo { - if in == nil { - return nil - } - out := new(DbfsStorageInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DiskSpec) DeepCopyInto(out *DiskSpec) { - *out = *in - if in.DiskType != nil { - in, out := &in.DiskType, &out.DiskType - *out = new(DiskType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskSpec. -func (in *DiskSpec) DeepCopy() *DiskSpec { - if in == nil { - return nil - } - out := new(DiskSpec) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DiskType) DeepCopyInto(out *DiskType) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskType. -func (in *DiskType) DeepCopy() *DiskType { - if in == nil { - return nil - } - out := new(DiskType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EventDetails) DeepCopyInto(out *EventDetails) { - *out = *in - if in.PreviousAttributes != nil { - in, out := &in.PreviousAttributes, &out.PreviousAttributes - *out = new(ClusterAttributes) - (*in).DeepCopyInto(*out) - } - if in.Attributes != nil { - in, out := &in.Attributes, &out.Attributes - *out = new(ClusterAttributes) - (*in).DeepCopyInto(*out) - } - if in.PreviousClusterSize != nil { - in, out := &in.PreviousClusterSize, &out.PreviousClusterSize - *out = new(ClusterSize) - (*in).DeepCopyInto(*out) - } - if in.ClusterSize != nil { - in, out := &in.ClusterSize, &out.ClusterSize - *out = new(ClusterSize) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EventDetails. -func (in *EventDetails) DeepCopy() *EventDetails { - if in == nil { - return nil - } - out := new(EventDetails) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *FileInfo) DeepCopyInto(out *FileInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileInfo. -func (in *FileInfo) DeepCopy() *FileInfo { - if in == nil { - return nil - } - out := new(FileInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InitScriptInfo) DeepCopyInto(out *InitScriptInfo) { - *out = *in - if in.Dbfs != nil { - in, out := &in.Dbfs, &out.Dbfs - *out = new(DbfsStorageInfo) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitScriptInfo. -func (in *InitScriptInfo) DeepCopy() *InitScriptInfo { - if in == nil { - return nil - } - out := new(InitScriptInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InstancePoolAndStats) DeepCopyInto(out *InstancePoolAndStats) { - *out = *in - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - in.DiskSpec.DeepCopyInto(&out.DiskSpec) - if in.PreloadedSparkVersions != nil { - in, out := &in.PreloadedSparkVersions, &out.PreloadedSparkVersions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DefaultTags != nil { - in, out := &in.DefaultTags, &out.DefaultTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - out.Stats = in.Stats - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolAndStats. -func (in *InstancePoolAndStats) DeepCopy() *InstancePoolAndStats { - if in == nil { - return nil - } - out := new(InstancePoolAndStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *InstancePoolStats) DeepCopyInto(out *InstancePoolStats) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolStats. -func (in *InstancePoolStats) DeepCopy() *InstancePoolStats { - if in == nil { - return nil - } - out := new(InstancePoolStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Job) DeepCopyInto(out *Job) { - *out = *in - if in.Settings != nil { - in, out := &in.Settings, &out.Settings - *out = new(JobSettings) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job. -func (in *Job) DeepCopy() *Job { - if in == nil { - return nil - } - out := new(Job) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobEmailNotifications) DeepCopyInto(out *JobEmailNotifications) { - *out = *in - if in.OnStart != nil { - in, out := &in.OnStart, &out.OnStart - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.OnSuccess != nil { - in, out := &in.OnSuccess, &out.OnSuccess - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.OnFailure != nil { - in, out := &in.OnFailure, &out.OnFailure - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobEmailNotifications. -func (in *JobEmailNotifications) DeepCopy() *JobEmailNotifications { - if in == nil { - return nil - } - out := new(JobEmailNotifications) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobSettings) DeepCopyInto(out *JobSettings) { - *out = *in - if in.NewCluster != nil { - in, out := &in.NewCluster, &out.NewCluster - *out = new(NewCluster) - (*in).DeepCopyInto(*out) - } - if in.NotebookTask != nil { - in, out := &in.NotebookTask, &out.NotebookTask - *out = new(NotebookTask) - (*in).DeepCopyInto(*out) - } - if in.SparkJarTask != nil { - in, out := &in.SparkJarTask, &out.SparkJarTask - *out = new(SparkJarTask) - (*in).DeepCopyInto(*out) - } - if in.SparkPythonTask != nil { - in, out := &in.SparkPythonTask, &out.SparkPythonTask - *out = new(SparkPythonTask) - (*in).DeepCopyInto(*out) - } - if in.SparkSubmitTask != nil { - in, out := &in.SparkSubmitTask, &out.SparkSubmitTask - *out = new(SparkSubmitTask) - (*in).DeepCopyInto(*out) - } - if in.Libraries != nil { - in, out := &in.Libraries, &out.Libraries - *out = make([]Library, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EmailNotifications != nil { - in, out := &in.EmailNotifications, &out.EmailNotifications - *out = new(JobEmailNotifications) - (*in).DeepCopyInto(*out) - } - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(CronSchedule) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSettings. -func (in *JobSettings) DeepCopy() *JobSettings { - if in == nil { - return nil - } - out := new(JobSettings) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobTask) DeepCopyInto(out *JobTask) { - *out = *in - if in.NotebookTask != nil { - in, out := &in.NotebookTask, &out.NotebookTask - *out = new(NotebookTask) - (*in).DeepCopyInto(*out) - } - if in.SparkJarTask != nil { - in, out := &in.SparkJarTask, &out.SparkJarTask - *out = new(SparkJarTask) - (*in).DeepCopyInto(*out) - } - if in.SparkPythonTask != nil { - in, out := &in.SparkPythonTask, &out.SparkPythonTask - *out = new(SparkPythonTask) - (*in).DeepCopyInto(*out) - } - if in.SparkSubmitTask != nil { - in, out := &in.SparkSubmitTask, &out.SparkSubmitTask - *out = new(SparkSubmitTask) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobTask. -func (in *JobTask) DeepCopy() *JobTask { - if in == nil { - return nil - } - out := new(JobTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Library) DeepCopyInto(out *Library) { - *out = *in - if in.Pypi != nil { - in, out := &in.Pypi, &out.Pypi - *out = new(PythonPyPiLibrary) - **out = **in - } - if in.Maven != nil { - in, out := &in.Maven, &out.Maven - *out = new(MavenLibrary) - (*in).DeepCopyInto(*out) - } - if in.Cran != nil { - in, out := &in.Cran, &out.Cran - *out = new(RCranLibrary) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Library. -func (in *Library) DeepCopy() *Library { - if in == nil { - return nil - } - out := new(Library) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LibraryFullStatus) DeepCopyInto(out *LibraryFullStatus) { - *out = *in - if in.Library != nil { - in, out := &in.Library, &out.Library - *out = new(Library) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(LibraryInstallStatus) - **out = **in - } - if in.Messages != nil { - in, out := &in.Messages, &out.Messages - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibraryFullStatus. -func (in *LibraryFullStatus) DeepCopy() *LibraryFullStatus { - if in == nil { - return nil - } - out := new(LibraryFullStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LogSyncStatus) DeepCopyInto(out *LogSyncStatus) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LogSyncStatus. -func (in *LogSyncStatus) DeepCopy() *LogSyncStatus { - if in == nil { - return nil - } - out := new(LogSyncStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MavenLibrary) DeepCopyInto(out *MavenLibrary) { - *out = *in - if in.Exclusions != nil { - in, out := &in.Exclusions, &out.Exclusions - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MavenLibrary. 
-func (in *MavenLibrary) DeepCopy() *MavenLibrary { - if in == nil { - return nil - } - out := new(MavenLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NewCluster) DeepCopyInto(out *NewCluster) { - *out = *in - if in.Autoscale != nil { - in, out := &in.Autoscale, &out.Autoscale - *out = new(AutoScale) - **out = **in - } - if in.SparkConf != nil { - in, out := &in.SparkConf, &out.SparkConf - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.CustomTags != nil { - in, out := &in.CustomTags, &out.CustomTags - *out = make([]ClusterTag, len(*in)) - copy(*out, *in) - } - if in.ClusterLogConf != nil { - in, out := &in.ClusterLogConf, &out.ClusterLogConf - *out = new(ClusterLogConf) - (*in).DeepCopyInto(*out) - } - if in.InitScripts != nil { - in, out := &in.InitScripts, &out.InitScripts - *out = make([]InitScriptInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SparkEnvVars != nil { - in, out := &in.SparkEnvVars, &out.SparkEnvVars - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NewCluster. -func (in *NewCluster) DeepCopy() *NewCluster { - if in == nil { - return nil - } - out := new(NewCluster) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NodeType) DeepCopyInto(out *NodeType) { - *out = *in - if in.NodeInfo != nil { - in, out := &in.NodeInfo, &out.NodeInfo - *out = new(ClusterCloudProviderNodeInfo) - (*in).DeepCopyInto(*out) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeType. -func (in *NodeType) DeepCopy() *NodeType { - if in == nil { - return nil - } - out := new(NodeType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NotebookOutput) DeepCopyInto(out *NotebookOutput) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookOutput. -func (in *NotebookOutput) DeepCopy() *NotebookOutput { - if in == nil { - return nil - } - out := new(NotebookOutput) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *NotebookTask) DeepCopyInto(out *NotebookTask) { - *out = *in - if in.BaseParameters != nil { - in, out := &in.BaseParameters, &out.BaseParameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NotebookTask. -func (in *NotebookTask) DeepCopy() *NotebookTask { - if in == nil { - return nil - } - out := new(NotebookTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ObjectInfo) DeepCopyInto(out *ObjectInfo) { - *out = *in - if in.ObjectType != nil { - in, out := &in.ObjectType, &out.ObjectType - *out = new(ObjectType) - **out = **in - } - if in.Language != nil { - in, out := &in.Language, &out.Language - *out = new(Language) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ObjectInfo. -func (in *ObjectInfo) DeepCopy() *ObjectInfo { - if in == nil { - return nil - } - out := new(ObjectInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParamPair) DeepCopyInto(out *ParamPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParamPair. -func (in *ParamPair) DeepCopy() *ParamPair { - if in == nil { - return nil - } - out := new(ParamPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ParameterPair) DeepCopyInto(out *ParameterPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParameterPair. -func (in *ParameterPair) DeepCopy() *ParameterPair { - if in == nil { - return nil - } - out := new(ParameterPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrincipalName) DeepCopyInto(out *PrincipalName) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrincipalName. -func (in *PrincipalName) DeepCopy() *PrincipalName { - if in == nil { - return nil - } - out := new(PrincipalName) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PublicTokenInfo) DeepCopyInto(out *PublicTokenInfo) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublicTokenInfo. -func (in *PublicTokenInfo) DeepCopy() *PublicTokenInfo { - if in == nil { - return nil - } - out := new(PublicTokenInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PythonPyPiLibrary) DeepCopyInto(out *PythonPyPiLibrary) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PythonPyPiLibrary. -func (in *PythonPyPiLibrary) DeepCopy() *PythonPyPiLibrary { - if in == nil { - return nil - } - out := new(PythonPyPiLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RCranLibrary) DeepCopyInto(out *RCranLibrary) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RCranLibrary. -func (in *RCranLibrary) DeepCopy() *RCranLibrary { - if in == nil { - return nil - } - out := new(RCranLibrary) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Run) DeepCopyInto(out *Run) { - *out = *in - if in.State != nil { - in, out := &in.State, &out.State - *out = new(RunState) - (*in).DeepCopyInto(*out) - } - if in.Schedule != nil { - in, out := &in.Schedule, &out.Schedule - *out = new(CronSchedule) - **out = **in - } - if in.Task != nil { - in, out := &in.Task, &out.Task - *out = new(JobTask) - (*in).DeepCopyInto(*out) - } - if in.ClusterSpec != nil { - in, out := &in.ClusterSpec, &out.ClusterSpec - *out = new(ClusterSpec) - (*in).DeepCopyInto(*out) - } - if in.ClusterInstance != nil { - in, out := &in.ClusterInstance, &out.ClusterInstance - *out = new(ClusterInstance) - **out = **in - } - if in.OverridingParameters != nil { - in, out := &in.OverridingParameters, &out.OverridingParameters - *out = new(RunParameters) - (*in).DeepCopyInto(*out) - } - if in.Trigger != nil { - in, out := &in.Trigger, &out.Trigger - *out = new(TriggerType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Run. -func (in *Run) DeepCopy() *Run { - if in == nil { - return nil - } - out := new(Run) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RunParameters) DeepCopyInto(out *RunParameters) { - *out = *in - if in.JarParams != nil { - in, out := &in.JarParams, &out.JarParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.NotebookParams != nil { - in, out := &in.NotebookParams, &out.NotebookParams - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.PythonParams != nil { - in, out := &in.PythonParams, &out.PythonParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SparkSubmitParams != nil { - in, out := &in.SparkSubmitParams, &out.SparkSubmitParams - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunParameters. -func (in *RunParameters) DeepCopy() *RunParameters { - if in == nil { - return nil - } - out := new(RunParameters) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RunState) DeepCopyInto(out *RunState) { - *out = *in - if in.LifeCycleState != nil { - in, out := &in.LifeCycleState, &out.LifeCycleState - *out = new(RunLifeCycleState) - **out = **in - } - if in.ResultState != nil { - in, out := &in.ResultState, &out.ResultState - *out = new(RunResultState) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RunState. -func (in *RunState) DeepCopy() *RunState { - if in == nil { - return nil - } - out := new(RunState) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SecretMetadata) DeepCopyInto(out *SecretMetadata) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretMetadata. -func (in *SecretMetadata) DeepCopy() *SecretMetadata { - if in == nil { - return nil - } - out := new(SecretMetadata) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *SecretScope) DeepCopyInto(out *SecretScope) { - *out = *in - if in.BackendType != nil { - in, out := &in.BackendType, &out.BackendType - *out = new(ScopeBackendType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretScope. -func (in *SecretScope) DeepCopy() *SecretScope { - if in == nil { - return nil - } - out := new(SecretScope) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkConfPair) DeepCopyInto(out *SparkConfPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfPair. -func (in *SparkConfPair) DeepCopy() *SparkConfPair { - if in == nil { - return nil - } - out := new(SparkConfPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkEnvPair) DeepCopyInto(out *SparkEnvPair) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkEnvPair. -func (in *SparkEnvPair) DeepCopy() *SparkEnvPair { - if in == nil { - return nil - } - out := new(SparkEnvPair) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkJarTask) DeepCopyInto(out *SparkJarTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJarTask. -func (in *SparkJarTask) DeepCopy() *SparkJarTask { - if in == nil { - return nil - } - out := new(SparkJarTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkNode) DeepCopyInto(out *SparkNode) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkNode. -func (in *SparkNode) DeepCopy() *SparkNode { - if in == nil { - return nil - } - out := new(SparkNode) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkPythonTask) DeepCopyInto(out *SparkPythonTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkPythonTask. -func (in *SparkPythonTask) DeepCopy() *SparkPythonTask { - if in == nil { - return nil - } - out := new(SparkPythonTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkSubmitTask) DeepCopyInto(out *SparkSubmitTask) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]string, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSubmitTask. 
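// For orientation, the pattern in this generated file is uniform: DeepCopyInto
// performs a field-by-field copy, re-allocating every pointer, slice, and map
// so the destination never aliases the source, and DeepCopy wraps it with a
// nil check plus a fresh allocation. A minimal hedged sketch of the intended
// usage — the AutoScale field names (MinWorkers/MaxWorkers) are assumed for
// illustration; only the ClusterInfo and AutoScale types appear in this file:
//
//	orig := &models.ClusterInfo{
//		AutoScale: &models.AutoScale{MinWorkers: 1, MaxWorkers: 4}, // assumed field names
//	}
//	clone := orig.DeepCopy()
//	clone.AutoScale.MaxWorkers = 8          // mutating the clone...
//	fmt.Println(orig.AutoScale.MaxWorkers)  // ...leaves the original at 4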
-func (in *SparkSubmitTask) DeepCopy() *SparkSubmitTask { - if in == nil { - return nil - } - out := new(SparkSubmitTask) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkVersion) DeepCopyInto(out *SparkVersion) { - *out = *in - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkVersion. -func (in *SparkVersion) DeepCopy() *SparkVersion { - if in == nil { - return nil - } - out := new(SparkVersion) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TerminationReason) DeepCopyInto(out *TerminationReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(TerminationCode) - **out = **in - } - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make([]ParameterPair, len(*in)) - copy(*out, *in) - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TerminationReason. -func (in *TerminationReason) DeepCopy() *TerminationReason { - if in == nil { - return nil - } - out := new(TerminationReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ViewItem) DeepCopyInto(out *ViewItem) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(ViewType) - **out = **in - } - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewItem. -func (in *ViewItem) DeepCopy() *ViewItem { - if in == nil { - return nil - } - out := new(ViewItem) - in.DeepCopyInto(out) - return out -} diff --git a/azure/models/doc.go b/azure/models/doc.go index 2073e9a..aa00136 100644 --- a/azure/models/doc.go +++ b/azure/models/doc.go @@ -1,2 +1,2 @@ -// +k8s:deepcopy-gen=package -package models +// +k8s:deepcopy-gen=package +package models diff --git a/azure/scim.go b/azure/scim.go index 6f733b9..1f71924 100644 --- a/azure/scim.go +++ b/azure/scim.go @@ -1,11 +1,11 @@ -package azure - -// ScimAPI exposes the SCIM API -type ScimAPI struct { - Client DBClient -} - -func (a ScimAPI) init(client DBClient) ScimAPI { - a.Client = client - return a -} +package azure + +// ScimAPI exposes the SCIM API +type ScimAPI struct { + Client DBClient +} + +func (a ScimAPI) init(client DBClient) ScimAPI { + a.Client = client + return a +} diff --git a/azure/secrets.go b/azure/secrets.go index 9cc73c9..61306f9 100644 --- a/azure/secrets.go +++ b/azure/secrets.go @@ -1,190 +1,190 @@ -package azure - -import ( - "encoding/base64" - "encoding/json" - "net/http" - - "github.com/xinsnake/databricks-sdk-golang/azure/models" -) - -// SecretsAPI exposes the Secrets API -type SecretsAPI struct { - Client DBClient -} - -func (a SecretsAPI) init(client DBClient) SecretsAPI { - a.Client = client - return a -} - -// CreateSecretScope creates a new secret scope -func (a SecretsAPI) CreateSecretScope(scope, initialManagePrincipal string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - InitialManagePrincipal string `json:"initial_manage_principal,omitempty" url:"initial_manage_principal,omitempty"` - }{ - scope, - initialManagePrincipal, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/create", data, nil) - 
return err -} - -// DeleteSecretScope deletes a secret scope -func (a SecretsAPI) DeleteSecretScope(scope string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - }{ - scope, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/delete", data, nil) - return err -} - -// ListSecretScopes lists all secret scopes available in the workspace -func (a SecretsAPI) ListSecretScopes() ([]models.SecretScope, error) { - var listSecretScopesResponse struct { - Scopes []models.SecretScope `json:"scopes,omitempty" url:"scopes,omitempty"` - } - - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/scopes/list", nil, nil) - if err != nil { - return listSecretScopesResponse.Scopes, err - } - - err = json.Unmarshal(resp, &listSecretScopesResponse) - return listSecretScopesResponse.Scopes, err -} - -// PutSecret creates or modifies a bytes secret depends on the type of scope backend with -func (a SecretsAPI) PutSecret(bytesValue []byte, scope, key string) error { - data := struct { - BytesValue string `json:"bytes_value,omitempty" url:"bytes_value,omitempty"` - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string `json:"key,omitempty" url:"key,omitempty"` - }{ - base64.StdEncoding.EncodeToString(bytesValue), - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) - return err -} - -// PutSecretString creates or modifies a string secret depends on the type of scope backend -func (a SecretsAPI) PutSecretString(stringValue, scope, key string) error { - data := struct { - StringValue string `json:"string_value,omitempty" url:"string_value,omitempty"` - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string `json:"key,omitempty" url:"key,omitempty"` - }{ - stringValue, - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil) - return err -} - -// DeleteSecret deletes a secret depends on the type of scope backend -func (a SecretsAPI) DeleteSecret(scope, key string) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Key string `json:"key,omitempty" url:"key,omitempty"` - }{ - scope, - key, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/delete", data, nil) - return err -} - -// ListSecrets lists the secret keys that are stored at this scope -func (a SecretsAPI) ListSecrets(scope string) ([]models.SecretMetadata, error) { - var secretsList struct { - Secrets []models.SecretMetadata `json:"secrets,omitempty" url:"secrets,omitempty"` - } - - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - }{ - scope, - } - - resp, err := a.Client.performQuery(http.MethodGet, "/secrets/list", data, nil) - if err != nil { - return secretsList.Secrets, err - } - - err = json.Unmarshal(resp, &secretsList) - return secretsList.Secrets, err -} - -// PutSecretACL creates or overwrites the ACL associated with the given principal (user or group) on the specified scope point -func (a SecretsAPI) PutSecretACL(scope, principal string, permission models.AclPermission) error { - data := struct { - Scope string `json:"scope,omitempty" url:"scope,omitempty"` - Principal string `json:"principal,omitempty" url:"principal,omitempty"` - Permission models.AclPermission `json:"permission,omitempty" url:"permission,omitempty"` - }{ - scope, - principal, - permission, - } - _, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/put", data, nil) - return err -} - -// 
DeleteSecretACL deletes the given ACL on the given scope
-func (a SecretsAPI) DeleteSecretACL(scope, principal string) error {
-	data := struct {
-		Scope     string `json:"scope,omitempty" url:"scope,omitempty"`
-		Principal string `json:"principal,omitempty" url:"principal,omitempty"`
-	}{
-		scope,
-		principal,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/delete", data, nil)
-	return err
-}
-
-// GetSecretACL describe the details about the given ACL, such as the group and permission
-func (a SecretsAPI) GetSecretACL(scope, principal string) (models.AclItem, error) {
-	var aclItem models.AclItem
-
-	data := struct {
-		Scope     string `json:"scope,omitempty" url:"scope,omitempty"`
-		Principal string `json:"principal,omitempty" url:"principal,omitempty"`
-	}{
-		scope,
-		principal,
-	}
-	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/get", data, nil)
-	if err != nil {
-		return aclItem, err
-	}
-
-	err = json.Unmarshal(resp, &aclItem)
-	return aclItem, err
-}
-
-// ListSecretACLs lists the ACLs set on the given scope
-func (a SecretsAPI) ListSecretACLs(scope string) ([]models.AclItem, error) {
-	var aclItem struct {
-		Acls []models.AclItem `json:"acls,omitempty" url:"acls,omitempty"`
-	}
-
-	data := struct {
-		Scope string `json:"scope,omitempty" url:"scope,omitempty"`
-	}{
-		scope,
-	}
-	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/list", data, nil)
-	if err != nil {
-		return aclItem.Acls, err
-	}
-
-	err = json.Unmarshal(resp, &aclItem)
-	return aclItem.Acls, err
-}
+package azure
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"net/http"
+
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+)
+
+// SecretsAPI exposes the Secrets API
+type SecretsAPI struct {
+	Client DBClient
+}
+
+func (a SecretsAPI) init(client DBClient) SecretsAPI {
+	a.Client = client
+	return a
+}
+
+// CreateSecretScope creates a new secret scope
+func (a SecretsAPI) CreateSecretScope(scope, initialManagePrincipal string) error {
+	data := struct {
+		Scope                  string `json:"scope,omitempty" url:"scope,omitempty"`
+		InitialManagePrincipal string `json:"initial_manage_principal,omitempty" url:"initial_manage_principal,omitempty"`
+	}{
+		scope,
+		initialManagePrincipal,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/create", data, nil)
+	return err
+}
+
+// DeleteSecretScope deletes a secret scope
+func (a SecretsAPI) DeleteSecretScope(scope string) error {
+	data := struct {
+		Scope string `json:"scope,omitempty" url:"scope,omitempty"`
+	}{
+		scope,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/scopes/delete", data, nil)
+	return err
+}
+
+// ListSecretScopes lists all secret scopes available in the workspace
+func (a SecretsAPI) ListSecretScopes() ([]models.SecretScope, error) {
+	var listSecretScopesResponse struct {
+		Scopes []models.SecretScope `json:"scopes,omitempty" url:"scopes,omitempty"`
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/scopes/list", nil, nil)
+	if err != nil {
+		return listSecretScopesResponse.Scopes, err
+	}
+
+	err = json.Unmarshal(resp, &listSecretScopesResponse)
+	return listSecretScopesResponse.Scopes, err
+}
+
+// PutSecret creates or modifies a bytes secret, depending on the type of scope backend
+func (a SecretsAPI) PutSecret(bytesValue []byte, scope, key string) error {
+	data := struct {
+		BytesValue string `json:"bytes_value,omitempty" url:"bytes_value,omitempty"`
+		Scope      string `json:"scope,omitempty" url:"scope,omitempty"`
+		Key        string `json:"key,omitempty" url:"key,omitempty"`
+	}{
+		base64.StdEncoding.EncodeToString(bytesValue),
+		scope,
+		key,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil)
+	return err
+}
+
+// PutSecretString creates or modifies a string secret, depending on the type of scope backend
+func (a SecretsAPI) PutSecretString(stringValue, scope, key string) error {
+	data := struct {
+		StringValue string `json:"string_value,omitempty" url:"string_value,omitempty"`
+		Scope       string `json:"scope,omitempty" url:"scope,omitempty"`
+		Key         string `json:"key,omitempty" url:"key,omitempty"`
+	}{
+		stringValue,
+		scope,
+		key,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/put", data, nil)
+	return err
+}
+
+// DeleteSecret deletes a secret; the exact behavior depends on the type of scope backend
+func (a SecretsAPI) DeleteSecret(scope, key string) error {
+	data := struct {
+		Scope string `json:"scope,omitempty" url:"scope,omitempty"`
+		Key   string `json:"key,omitempty" url:"key,omitempty"`
+	}{
+		scope,
+		key,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/delete", data, nil)
+	return err
+}
+
+// ListSecrets lists the secret keys that are stored at this scope
+func (a SecretsAPI) ListSecrets(scope string) ([]models.SecretMetadata, error) {
+	var secretsList struct {
+		Secrets []models.SecretMetadata `json:"secrets,omitempty" url:"secrets,omitempty"`
+	}
+
+	data := struct {
+		Scope string `json:"scope,omitempty" url:"scope,omitempty"`
+	}{
+		scope,
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/list", data, nil)
+	if err != nil {
+		return secretsList.Secrets, err
+	}
+
+	err = json.Unmarshal(resp, &secretsList)
+	return secretsList.Secrets, err
+}
+
+// PutSecretACL creates or overwrites the ACL associated with the given principal (user or group) on the specified scope point
+func (a SecretsAPI) PutSecretACL(scope, principal string, permission models.AclPermission) error {
+	data := struct {
+		Scope      string               `json:"scope,omitempty" url:"scope,omitempty"`
+		Principal  string               `json:"principal,omitempty" url:"principal,omitempty"`
+		Permission models.AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
+	}{
+		scope,
+		principal,
+		permission,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/put", data, nil)
+	return err
+}
+
+// DeleteSecretACL deletes the given ACL on the given scope
+func (a SecretsAPI) DeleteSecretACL(scope, principal string) error {
+	data := struct {
+		Scope     string `json:"scope,omitempty" url:"scope,omitempty"`
+		Principal string `json:"principal,omitempty" url:"principal,omitempty"`
+	}{
+		scope,
+		principal,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/secrets/acls/delete", data, nil)
+	return err
+}
+
+// GetSecretACL describes the details about the given ACL, such as the group and permission
+func (a SecretsAPI) GetSecretACL(scope, principal string) (models.AclItem, error) {
+	var aclItem models.AclItem
+
+	data := struct {
+		Scope     string `json:"scope,omitempty" url:"scope,omitempty"`
+		Principal string `json:"principal,omitempty" url:"principal,omitempty"`
+	}{
+		scope,
+		principal,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/get", data, nil)
+	if err != nil {
+		return aclItem, err
+	}
+
+	err = json.Unmarshal(resp, &aclItem)
+	return aclItem, err
+}
+
+// ListSecretACLs lists the ACLs set on the given scope
+func (a SecretsAPI) ListSecretACLs(scope string) ([]models.AclItem, error) {
+	var aclItem struct {
+		Acls []models.AclItem `json:"acls,omitempty" url:"acls,omitempty"`
+	}
+
+	data := struct {
+		Scope string `json:"scope,omitempty" url:"scope,omitempty"`
+	}{
+		scope,
+	}
+	resp, err := a.Client.performQuery(http.MethodGet, "/secrets/acls/list", data, nil)
+	if err != nil {
+		return aclItem.Acls, err
+	}
+
+	err = json.Unmarshal(resp, &aclItem)
+	return aclItem.Acls, err
+}
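The Secrets API above is driven through a DBClient. A minimal usage sketch follows; it is hedged, since the client wiring (an azure.DBClient value with an Init method and a Secrets() accessor) is assumed from the package layout rather than shown in this diff — only the SecretsAPI methods themselves come from the code above.

package main

import (
	"fmt"
	"log"

	databricks "github.com/xinsnake/databricks-sdk-golang"
	"github.com/xinsnake/databricks-sdk-golang/azure"
)

func main() {
	// DBClientOption and its Init method are defined in databricks.go further below.
	opt := databricks.DBClientOption{
		Host:  "https://<region>.azuredatabricks.net", // placeholder host
		Token: "<personal-access-token>",              // placeholder token
	}
	opt.Init()

	// Assumption: azure.DBClient exposes Init(opt) and a Secrets() accessor.
	var client azure.DBClient
	client = client.Init(opt)
	secrets := client.Secrets()

	if err := secrets.CreateSecretScope("demo-scope", "users"); err != nil {
		log.Fatal(err)
	}
	if err := secrets.PutSecretString("s3cr3t", "demo-scope", "db-password"); err != nil {
		log.Fatal(err)
	}
	keys, err := secrets.ListSecrets("demo-scope")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(keys) // secret metadata stored in demo-scope
}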
diff --git a/azure/token.go b/azure/token.go
index de4a1f2..89848a8 100644
--- a/azure/token.go
+++ b/azure/token.go
@@ -1,70 +1,70 @@
-package azure
-
-import (
-	"encoding/json"
-	"net/http"
-
-	"github.com/xinsnake/databricks-sdk-golang/azure/models"
-)
-
-// TokenAPI exposes the Token API
-type TokenAPI struct {
-	Client DBClient
-}
-
-func (a TokenAPI) init(client DBClient) TokenAPI {
-	a.Client = client
-	return a
-}
-
-// TokenCreateResponse is the response from Create
-type TokenCreateResponse struct {
-	TokenValue string                 `json:"token_value,omitempty" url:"token_value,omitempty"`
-	TokenInfo  models.PublicTokenInfo `json:"token_info,omitempty" url:"token_info,omitempty"`
-}
-
-// Create creates and return a token
-func (a SecretsAPI) Create(lifetimeSeconds int64, comment string) (TokenCreateResponse, error) {
-	var createResponse TokenCreateResponse
-
-	data := struct {
-		LifetimeSeconds int64  `json:"lifetime_seconds,omitempty" url:"lifetime_seconds,omitempty"`
-		Comment         string `json:"comment,omitempty" url:"comment,omitempty"`
-	}{
-		lifetimeSeconds,
-		comment,
-	}
-	resp, err := a.Client.performQuery(http.MethodPost, "/token/create", data, nil)
-	if err != nil {
-		return createResponse, err
-	}
-
-	err = json.Unmarshal(resp, &createResponse)
-	return createResponse, err
-}
-
-// List lists all the valid tokens for a user-workspace pair
-func (a SecretsAPI) List() ([]models.PublicTokenInfo, error) {
-	var publicTokenInfo struct {
-		TokenInfos []models.PublicTokenInfo `json:"token_infos,omitempty" url:"token_infos,omitempty"`
-	}
-
-	resp, err := a.Client.performQuery(http.MethodGet, "/token/list", nil, nil)
-	if err != nil {
-		return publicTokenInfo.TokenInfos, err
-	}
-
-	err = json.Unmarshal(resp, &publicTokenInfo)
-	return publicTokenInfo.TokenInfos, err
-}
-
-// Revoke revokes an access token
-func (a SecretsAPI) Revoke(tokenID string) error {
-	data := struct {
-		TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"`
-	}{
-		tokenID,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/token/delete", data, nil)
-	return err
-}
+package azure
+
+import (
+	"encoding/json"
+	"net/http"
+
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+)
+
+// TokenAPI exposes the Token API
+type TokenAPI struct {
+	Client DBClient
+}
+
+func (a TokenAPI) init(client DBClient) TokenAPI {
+	a.Client = client
+	return a
+}
+
+// TokenCreateResponse is the response from Create
+type TokenCreateResponse struct {
+	TokenValue string                 `json:"token_value,omitempty" url:"token_value,omitempty"`
+	TokenInfo  models.PublicTokenInfo `json:"token_info,omitempty" url:"token_info,omitempty"`
+}
+
+// Create creates and returns a token
+func (a TokenAPI) Create(lifetimeSeconds int64, comment string) (TokenCreateResponse, error) {
+	var createResponse TokenCreateResponse
+
+	data := struct {
+		LifetimeSeconds int64  `json:"lifetime_seconds,omitempty" url:"lifetime_seconds,omitempty"`
+		Comment         string `json:"comment,omitempty" url:"comment,omitempty"`
+	}{
+		lifetimeSeconds,
+		comment,
+	}
+	resp, err := a.Client.performQuery(http.MethodPost, "/token/create", data, nil)
+	if err != nil {
+		return createResponse, err
+	}
+
+	err = json.Unmarshal(resp, &createResponse)
+	return createResponse, err
+}
+
+// List lists all the valid tokens for a user-workspace pair
+func (a TokenAPI) List() ([]models.PublicTokenInfo, error) {
+	var publicTokenInfo struct {
+		TokenInfos []models.PublicTokenInfo `json:"token_infos,omitempty" url:"token_infos,omitempty"`
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/token/list", nil, nil)
+	if err != nil {
+		return publicTokenInfo.TokenInfos, err
+	}
+
+	err = json.Unmarshal(resp, &publicTokenInfo)
+	return publicTokenInfo.TokenInfos, err
+}
+
+// Revoke revokes an access token
+func (a TokenAPI) Revoke(tokenID string) error {
+	data := struct {
+		TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"`
+	}{
+		tokenID,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/token/delete", data, nil)
+	return err
+}
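With the token methods on the TokenAPI receiver the file actually declares (they were previously declared on SecretsAPI, which made them unreachable from a TokenAPI value), a short hedged sketch, extending the main function of the secrets example above and reusing its assumed client wiring:

	// TokenAPI's Client field is exported, so the struct can be wired directly.
	tokens := azure.TokenAPI{Client: client}

	created, err := tokens.Create(3600, "ci token") // one-hour lifetime
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(created.TokenValue)

	// Revoke posts to /token/delete, as above; the token ID is a placeholder.
	if err := tokens.Revoke("<token-id>"); err != nil {
		log.Fatal(err)
	}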
diff --git a/azure/workspace.go b/azure/workspace.go
index f8bb88c..3f12c1b 100644
--- a/azure/workspace.go
+++ b/azure/workspace.go
@@ -1,133 +1,133 @@
-package azure
-
-import (
-	"encoding/base64"
-	"encoding/json"
-	"net/http"
-
-	"github.com/xinsnake/databricks-sdk-golang/azure/models"
-)
-
-// WorkspaceAPI exposes the Workspace API
-type WorkspaceAPI struct {
-	Client DBClient
-}
-
-func (a WorkspaceAPI) init(client DBClient) WorkspaceAPI {
-	a.Client = client
-	return a
-}
-
-// Delete deletes an object or a directory (and optionally recursively deletes all objects in the directory)
-func (a WorkspaceAPI) Delete(path string, recursive bool) error {
-	data := struct {
-		Path      string `json:"path,omitempty" url:"path,omitempty"`
-		Recursive bool   `json:"recursive,omitempty" url:"recursive,omitempty"`
-	}{
-		path,
-		recursive,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/workspace/delete", data, nil)
-	return err
-}
-
-// Export exports a notebook or contents of an entire directory
-func (a WorkspaceAPI) Export(path string, format models.ExportFormat, directDownload bool) ([]byte, error) {
-	var exportResponse struct {
-		Content string `json:"content,omitempty" url:"content,omitempty"`
-	}
-
-	data := struct {
-		Path           string              `json:"path,omitempty" url:"path,omitempty"`
-		Format         models.ExportFormat `json:"format,omitempty" url:"format,omitempty"`
-		DirectDownload bool                `json:"direct_download,omitempty" url:"direct_download,omitempty"`
-	}{
-		path,
-		format,
-		directDownload,
-	}
-
-	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/export", data, nil)
-	if err != nil {
-		return []byte{}, err
-	}
-
-	err = json.Unmarshal(resp, &exportResponse)
-	if err != nil {
-		return []byte{}, err
-	}
-
-	return base64.StdEncoding.DecodeString(exportResponse.Content)
-}
-
-// GetStatus gets the status of an object or a directory
-func (a WorkspaceAPI) GetStatus(path string) (models.ObjectInfo, error) {
-	var objectInfo models.ObjectInfo
-
-	data := struct {
-		Path string `json:"path,omitempty" url:"path,omitempty"`
-	}{
-		path,
-	}
-
-	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/get-status", data, nil)
-	if err != nil {
-		return objectInfo, err
-	}
-
-	err = json.Unmarshal(resp, &objectInfo)
-	return objectInfo, err
-}
-
-// Import imports a notebook or the contents of an entire directory
-func (a WorkspaceAPI) Import(path string, format models.ExportFormat,
-	language models.Language, content []byte, overwrite bool) error {
-
-	data := struct {
-		Path      string              `json:"path,omitempty" url:"path,omitempty"`
-		Format    models.ExportFormat `json:"format,omitempty" url:"format,omitempty"`
-		Language  models.Language     `json:"language,omitempty" url:"language,omitempty"`
-		Content   string              `json:"content,omitempty" url:"content,omitempty"`
-		Overwrite bool                `json:"overwrite,omitempty" url:"overwrite,omitempty"`
-	}{
-		path,
-		format,
-		language,
-		base64.StdEncoding.EncodeToString(content),
-		overwrite,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/workspace/import", data, nil)
-	return err
-}
-
-// List lists the contents of a directory, or the object if it is not a directory
-func (a WorkspaceAPI) List(path string) ([]models.ObjectInfo, error) {
-	var listResponse struct {
-		Objects []models.ObjectInfo `json:"objects,omitempty" url:"objects,omitempty"`
-	}
-
-	data := struct {
-		Path string `json:"path,omitempty" url:"path,omitempty"`
-	}{
-		path,
-	}
-
-	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/list", data, nil)
-	if err != nil {
-		return listResponse.Objects, err
-	}
-
-	err = json.Unmarshal(resp, &listResponse)
-	return listResponse.Objects, err
-}
-
-// Mkdirs creates the given directory and necessary parent directories if they do not exists
-func (a WorkspaceAPI) Mkdirs(path string) error {
-	data := struct {
-		Path string `json:"path,omitempty" url:"path,omitempty"`
-	}{
-		path,
-	}
-	_, err := a.Client.performQuery(http.MethodPost, "/workspace/mkdirs", data, nil)
-	return err
-}
+package azure
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"net/http"
+
+	"github.com/xinsnake/databricks-sdk-golang/azure/models"
+)
+
+// WorkspaceAPI exposes the Workspace API
+type WorkspaceAPI struct {
+	Client DBClient
+}
+
+func (a WorkspaceAPI) init(client DBClient) WorkspaceAPI {
+	a.Client = client
+	return a
+}
+
+// Delete deletes an object or a directory (and optionally recursively deletes all objects in the directory)
+func (a WorkspaceAPI) Delete(path string, recursive bool) error {
+	data := struct {
+		Path      string `json:"path,omitempty" url:"path,omitempty"`
+		Recursive bool   `json:"recursive,omitempty" url:"recursive,omitempty"`
+	}{
+		path,
+		recursive,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/workspace/delete", data, nil)
+	return err
+}
+
+// Export exports a notebook or the contents of an entire directory
+func (a WorkspaceAPI) Export(path string, format models.ExportFormat, directDownload bool) ([]byte, error) {
+	var exportResponse struct {
+		Content string `json:"content,omitempty" url:"content,omitempty"`
+	}
+
+	data := struct {
+		Path           string              `json:"path,omitempty" url:"path,omitempty"`
+		Format         models.ExportFormat `json:"format,omitempty" url:"format,omitempty"`
+		DirectDownload bool                `json:"direct_download,omitempty" url:"direct_download,omitempty"`
+	}{
+		path,
+		format,
+		directDownload,
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/export", data, nil)
+	if err != nil {
+		return []byte{}, err
+	}
+
+	err = json.Unmarshal(resp, &exportResponse)
+	if err != nil {
+		return []byte{}, err
+	}
+
+	return base64.StdEncoding.DecodeString(exportResponse.Content)
+}
+
+// GetStatus gets the status of an object or a directory
+func (a WorkspaceAPI) GetStatus(path string) (models.ObjectInfo, error) {
+	var objectInfo models.ObjectInfo
+
+	data := struct {
+		Path string `json:"path,omitempty" url:"path,omitempty"`
+	}{
+		path,
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/get-status", data, nil)
+	if err != nil {
+		return objectInfo, err
+	}
+
+	err = json.Unmarshal(resp, &objectInfo)
+	return objectInfo, err
+}
+
+// Import imports a notebook or the contents of an entire directory
+func (a WorkspaceAPI) Import(path string, format models.ExportFormat,
+	language models.Language, content []byte, overwrite bool) error {
+
+	data := struct {
+		Path      string              `json:"path,omitempty" url:"path,omitempty"`
+		Format    models.ExportFormat `json:"format,omitempty" url:"format,omitempty"`
+		Language  models.Language     `json:"language,omitempty" url:"language,omitempty"`
+		Content   string              `json:"content,omitempty" url:"content,omitempty"`
+		Overwrite bool                `json:"overwrite,omitempty" url:"overwrite,omitempty"`
+	}{
+		path,
+		format,
+		language,
+		base64.StdEncoding.EncodeToString(content),
+		overwrite,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/workspace/import", data, nil)
+	return err
+}
+
+// List lists the contents of a directory, or the object if it is not a directory
+func (a WorkspaceAPI) List(path string) ([]models.ObjectInfo, error) {
+	var listResponse struct {
+		Objects []models.ObjectInfo `json:"objects,omitempty" url:"objects,omitempty"`
+	}
+
+	data := struct {
+		Path string `json:"path,omitempty" url:"path,omitempty"`
+	}{
+		path,
+	}
+
+	resp, err := a.Client.performQuery(http.MethodGet, "/workspace/list", data, nil)
+	if err != nil {
+		return listResponse.Objects, err
+	}
+
+	err = json.Unmarshal(resp, &listResponse)
+	return listResponse.Objects, err
+}
+
+// Mkdirs creates the given directory and necessary parent directories if they do not exist
+func (a WorkspaceAPI) Mkdirs(path string) error {
+	data := struct {
+		Path string `json:"path,omitempty" url:"path,omitempty"`
+	}{
+		path,
+	}
+	_, err := a.Client.performQuery(http.MethodPost, "/workspace/mkdirs", data, nil)
+	return err
+}
diff --git a/databricks.go b/databricks.go
index 9fb62af..0e54103 100644
--- a/databricks.go
+++ b/databricks.go
@@ -1,87 +1,87 @@
-package databricks
-
-import (
-	"crypto/tls"
-	"encoding/base64"
-	"fmt"
-	"net/http"
-	"net/url"
-	"time"
-)
-
-// DBClientOption is used to configure the DataBricks Client
-type DBClientOption struct {
-	User               string
-	Password           string
-	Host               string
-	Token              string
-	DefaultHeaders     map[string]string
-	InsecureSkipVerify bool
-	TimeoutSeconds     int
-	client             http.Client
-}
-
-// Init initializes the client
-func (o *DBClientOption) Init() {
-	if o.TimeoutSeconds == 0 {
-		o.TimeoutSeconds = 10
-	}
-	o.client = http.Client{
-		Timeout: time.Duration(time.Duration(o.TimeoutSeconds) * time.Second),
-		Transport: &http.Transport{
-			TLSClientConfig: &tls.Config{
-				InsecureSkipVerify: o.InsecureSkipVerify,
-			},
-		},
-	}
-}
-
-func (o *DBClientOption) getHTTPClient() http.Client {
-	return o.client
-}
-
-func (o *DBClientOption) getAuthHeader() map[string]string {
-	auth := make(map[string]string)
-	if o.User != "" && o.Password != "" {
-		encodedAuth := []byte(o.User + ":" + o.Password)
-		userHeaderData := "Basic " + base64.StdEncoding.EncodeToString(encodedAuth)
-		auth["Authorization"] = userHeaderData
-		auth["Content-Type"] = "application/json"
-	} else if o.Token != "" {
-		auth["Authorization"] = "Bearer " + o.Token
-		auth["Content-Type"] = "application/json"
-	}
-	return auth
-}
-
-func (o *DBClientOption) getUserAgentHeader() map[string]string {
-	return map[string]string{
-		"User-Agent": fmt.Sprintf("databricks-sdk-golang-%s", SdkVersion),
-	}
-}
-
-func (o *DBClientOption) getDefaultHeaders() map[string]string {
-	auth := o.getAuthHeader()
-	userAgent := o.getUserAgentHeader()
-
-	defaultHeaders := make(map[string]string)
-	for k, v := range auth {
-		defaultHeaders[k] = v
-	}
-	for k, v := range o.DefaultHeaders {
-		defaultHeaders[k] = v
-	}
-	for k, v := range userAgent {
- defaultHeaders[k] = v - } - return defaultHeaders -} - -func (o *DBClientOption) getRequestURI(path string) (string, error) { - parsedURI, err := url.Parse(o.Host) - if err != nil { - return "", err - } - requestURI := fmt.Sprintf("%s://%s/api/%s%s", parsedURI.Scheme, parsedURI.Host, APIVersion, path) - return requestURI, nil -} +package databricks + +import ( + "crypto/tls" + "encoding/base64" + "fmt" + "net/http" + "net/url" + "time" +) + +// DBClientOption is used to configure the DataBricks Client +type DBClientOption struct { + User string + Password string + Host string + Token string + DefaultHeaders map[string]string + InsecureSkipVerify bool + TimeoutSeconds int + client http.Client +} + +// Init initializes the client +func (o *DBClientOption) Init() { + if o.TimeoutSeconds == 0 { + o.TimeoutSeconds = 10 + } + o.client = http.Client{ + Timeout: time.Duration(time.Duration(o.TimeoutSeconds) * time.Second), + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: o.InsecureSkipVerify, + }, + }, + } +} + +func (o *DBClientOption) getHTTPClient() http.Client { + return o.client +} + +func (o *DBClientOption) getAuthHeader() map[string]string { + auth := make(map[string]string) + if o.User != "" && o.Password != "" { + encodedAuth := []byte(o.User + ":" + o.Password) + userHeaderData := "Basic " + base64.StdEncoding.EncodeToString(encodedAuth) + auth["Authorization"] = userHeaderData + auth["Content-Type"] = "application/json" + } else if o.Token != "" { + auth["Authorization"] = "Bearer " + o.Token + auth["Content-Type"] = "application/json" + } + return auth +} + +func (o *DBClientOption) getUserAgentHeader() map[string]string { + return map[string]string{ + "User-Agent": fmt.Sprintf("databricks-sdk-golang-%s", SdkVersion), + } +} + +func (o *DBClientOption) getDefaultHeaders() map[string]string { + auth := o.getAuthHeader() + userAgent := o.getUserAgentHeader() + + defaultHeaders := make(map[string]string) + for k, v := range auth { + defaultHeaders[k] = v + } + for k, v := range o.DefaultHeaders { + defaultHeaders[k] = v + } + for k, v := range userAgent { + defaultHeaders[k] = v + } + return defaultHeaders +} + +func (o *DBClientOption) getRequestURI(path string) (string, error) { + parsedURI, err := url.Parse(o.Host) + if err != nil { + return "", err + } + requestURI := fmt.Sprintf("%s://%s/api/%s%s", parsedURI.Scheme, parsedURI.Host, APIVersion, path) + return requestURI, nil +} diff --git a/deepcopy_generated.go b/deepcopy_generated.go deleted file mode 100644 index 3d9bdf0..0000000 --- a/deepcopy_generated.go +++ /dev/null @@ -1,29 +0,0 @@ -// +build !ignore_autogenerated - -// Code generated by deepcopy-gen. DO NOT EDIT. - -package databricks - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DBClientOption) DeepCopyInto(out *DBClientOption) { - *out = *in - if in.DefaultHeaders != nil { - in, out := &in.DefaultHeaders, &out.DefaultHeaders - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - //in.client.DeepCopyInto(&out.client) - return -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DBClientOption. 
-func (in *DBClientOption) DeepCopy() *DBClientOption { - if in == nil { - return nil - } - out := new(DBClientOption) - in.DeepCopyInto(out) - return out -} diff --git a/doc.go b/doc.go index 97348b4..1e1a3ad 100644 --- a/doc.go +++ b/doc.go @@ -1,2 +1,2 @@ -// +k8s:deepcopy-gen=package -package databricks +// +k8s:deepcopy-gen=package +package databricks diff --git a/go.mod b/go.mod index 02ab35a..7895079 100644 --- a/go.mod +++ b/go.mod @@ -1,11 +1,11 @@ -module github.com/xinsnake/databricks-sdk-golang - -go 1.12 - -require ( - github.com/google/go-querystring v1.0.0 - github.com/onsi/ginkgo v1.10.2 - github.com/onsi/gomega v1.5.0 - golang.org/x/net v0.0.0-20190620200207-3b0461eec859 // indirect - golang.org/x/sync v0.0.0-20190423024810-112230192c58 // indirect -) +module github.com/xinsnake/databricks-sdk-golang + +go 1.12 + +require ( + github.com/google/go-querystring v1.0.0 + github.com/onsi/ginkgo v1.10.2 + github.com/onsi/gomega v1.5.0 + golang.org/x/net v0.0.0-20190620200207-3b0461eec859 // indirect + golang.org/x/sync v0.0.0-20190423024810-112230192c58 // indirect +) diff --git a/go.sum b/go.sum index 160694b..dff76f6 100644 --- a/go.sum +++ b/go.sum @@ -1,33 +1,33 @@ -github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= -github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.2 h1:uqH7bpe+ERSiDa34FDOF7RikN6RzXgduUF8yarlZp94= -github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo= -github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 
diff --git a/doc.go b/doc.go
index 97348b4..1e1a3ad 100644
--- a/doc.go
+++ b/doc.go
@@ -1,2 +1,2 @@
-// +k8s:deepcopy-gen=package
-package databricks
+// +k8s:deepcopy-gen=package
+package databricks
diff --git a/go.mod b/go.mod
index 02ab35a..7895079 100644
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,11 @@
-module github.com/xinsnake/databricks-sdk-golang
-
-go 1.12
-
-require (
-	github.com/google/go-querystring v1.0.0
-	github.com/onsi/ginkgo v1.10.2
-	github.com/onsi/gomega v1.5.0
-	golang.org/x/net v0.0.0-20190620200207-3b0461eec859 // indirect
-	golang.org/x/sync v0.0.0-20190423024810-112230192c58 // indirect
-)
+module github.com/xinsnake/databricks-sdk-golang
+
+go 1.12
+
+require (
+	github.com/google/go-querystring v1.0.0
+	github.com/onsi/ginkgo v1.10.2
+	github.com/onsi/gomega v1.5.0
+	golang.org/x/net v0.0.0-20190620200207-3b0461eec859 // indirect
+	golang.org/x/sync v0.0.0-20190423024810-112230192c58 // indirect
+)
diff --git a/go.sum b/go.sum
index 160694b..dff76f6 100644
--- a/go.sum
+++ b/go.sum
@@ -1,33 +1,33 @@
-github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
-github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
-github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.10.2 h1:uqH7bpe+ERSiDa34FDOF7RikN6RzXgduUF8yarlZp94=
-github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
-github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
+github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.10.2 h1:uqH7bpe+ERSiDa34FDOF7RikN6RzXgduUF8yarlZp94=
+github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
+github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/query.go b/query.go
index 5098fdf..8b16e08 100644
--- a/query.go
+++ b/query.go
@@ -1,71 +1,71 @@
-package databricks
-
-import (
-	"bytes"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-
-	"github.com/google/go-querystring/query"
-)
-
-// PerformQuery can be used in a client or directly
-func PerformQuery(option DBClientOption, method, path string, data interface{}, headers map[string]string) ([]byte, error) {
-
-	requestURL, err := option.getRequestURI(path)
-	if err != nil {
-		return nil, err
-	}
-
-	requestHeaders := option.getDefaultHeaders()
-
-	if len(headers) > 0 {
-		for k, v := range headers {
-			requestHeaders[k] = v
-		}
-	}
-
-	var requestBody []byte
-	if method == "GET" {
-		params, err := query.Values(data)
-		if err != nil {
-			return nil, err
-		}
-		requestURL += "?" + params.Encode()
-	} else {
-		bodyBytes, err := json.Marshal(data)
-		if err != nil {
-			return nil, err
-		}
-		requestBody = bodyBytes
-	}
-
-	client := option.getHTTPClient()
-
-	request, err := http.NewRequest(method, requestURL, bytes.NewBuffer(requestBody))
-	if err != nil {
-		return nil, err
-	}
-	for k, v := range requestHeaders {
-		request.Header.Set(k, v)
-	}
-
-	resp, err := client.Do(request)
-	if err != nil {
-		return nil, err
-	}
-
-	defer resp.Body.Close()
-
-	body, err := ioutil.ReadAll(resp.Body)
-	if err != nil {
-		return nil, err
-	}
-
-	if resp.StatusCode >= 400 {
-		return nil, fmt.Errorf("Response from server (%d) %s", resp.StatusCode, string(body))
-	}
-
-	return body, nil
-}
+package databricks
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/google/go-querystring/query"
+)
+
+// PerformQuery can be used in a client or directly
+func PerformQuery(option DBClientOption, method, path string, data interface{}, headers map[string]string) ([]byte, error) {
+
+	requestURL, err := option.getRequestURI(path)
+	if err != nil {
+		return nil, err
+	}
+
+	requestHeaders := option.getDefaultHeaders()
+
+	if len(headers) > 0 {
+		for k, v := range headers {
+			requestHeaders[k] = v
+		}
+	}
+
+	var requestBody []byte
+	if method == "GET" {
+		params, err := query.Values(data)
+		if err != nil {
+			return nil, err
+		}
+		requestURL += "?" + params.Encode()
+	} else {
+		bodyBytes, err := json.Marshal(data)
+		if err != nil {
+			return nil, err
+		}
+		requestBody = bodyBytes
+	}
+
+	client := option.getHTTPClient()
+
+	request, err := http.NewRequest(method, requestURL, bytes.NewBuffer(requestBody))
+	if err != nil {
+		return nil, err
+	}
+	for k, v := range requestHeaders {
+		request.Header.Set(k, v)
+	}
+
+	resp, err := client.Do(request)
+	if err != nil {
+		return nil, err
+	}
+
+	defer resp.Body.Close()
+
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	if resp.StatusCode >= 400 {
+		return nil, fmt.Errorf("Response from server (%d) %s", resp.StatusCode, string(body))
+	}
+
+	return body, nil
+}
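Note: PerformQuery is the single entry point for every HTTP call — for GET it encodes `data` into a query string via go-querystring, for everything else it JSON-encodes `data` as the request body, and any status of 400 or above is surfaced as an error carrying the response text. A usage sketch; the host and token are placeholders, and /clusters/list is assumed to be a valid API 2.0 path:

package main

import (
	"fmt"
	"log"

	databricks "github.com/xinsnake/databricks-sdk-golang"
)

func main() {
	opt := databricks.DBClientOption{
		Host:  "https://example.cloud.databricks.com", // placeholder host
		Token: "dapiXXXXXXXXXXXX",                     // placeholder token
	}
	opt.Init()

	// For GET, PerformQuery turns `data` into a query string; nil sends no parameters.
	body, err := databricks.PerformQuery(opt, "GET", "/clusters/list", nil, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body))
}

The final headers parameter, nil here, lets a caller override or extend the defaults built by getDefaultHeaders for a single request.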
diff --git a/version.go b/version.go
index 33e7a08..cd2fc1d 100644
--- a/version.go
+++ b/version.go
@@ -1,8 +1,8 @@
-package databricks
-
-const (
-	// APIVersion is the version of the RESTful API of DataBricks
-	APIVersion = "2.0"
-	// SdkVersion is the version of this library
-	SdkVersion = "0.1.3"
-)
+package databricks
+
+const (
+	// APIVersion is the version of the RESTful API of DataBricks
+	APIVersion = "2.0"
+	// SdkVersion is the version of this library
+	SdkVersion = "0.1.3"
+)
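Note: these two constants feed every request — APIVersion into the URI built by getRequestURI, SdkVersion into the User-Agent header from getUserAgentHeader. A small sketch reproducing the derived strings, with a hypothetical workspace host standing in for a real one:

package example

import "fmt"

// demoStrings rebuilds the strings the SDK derives from APIVersion and
// SdkVersion, using literal copies of the constants for illustration.
func demoStrings() {
	requestURI := fmt.Sprintf("%s://%s/api/%s%s",
		"https", "example.cloud.databricks.com", "2.0", "/clusters/list")
	userAgent := fmt.Sprintf("databricks-sdk-golang-%s", "0.1.3")

	fmt.Println(requestURI) // https://example.cloud.databricks.com/api/2.0/clusters/list
	fmt.Println(userAgent)  // databricks-sdk-golang-0.1.3
}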