siliang.j 2021-01-03 13:02:23 +09:00
parent 493390987f
commit 0ceac36918
204 changed files with 6768 additions and 6768 deletions

@@ -1,57 +1,57 @@
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
FROM golang:1.12.5
# Avoid warnings by switching to noninteractive
ENV DEBIAN_FRONTEND=noninteractive
# Configure apt, install packages and tools
RUN apt-get update \
&& apt-get -y install --no-install-recommends apt-utils 2>&1 \
# Verify git, process tools, lsb-release (common in install instructions for CLIs) installed
&& apt-get -y install git procps lsb-release \
# Install Editor
&& apt-get install vim -y \
# Install gocode-gomod
&& go get -x -d github.com/stamblerre/gocode 2>&1 \
&& go build -o gocode-gomod github.com/stamblerre/gocode \
&& mv gocode-gomod $GOPATH/bin/ \
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*
# Enable go modules
ENV GO111MODULE=on
# Install Go tools
RUN \
# --> Go language server
go get golang.org/x/tools/gopls@v0.3.3 \
# --> GolangCI-lint
&& curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sed 's/tar -/tar --no-same-owner -/g' | sh -s -- -b $(go env GOPATH)/bin \
# --> Delve for debugging
&& go get github.com/go-delve/delve/cmd/dlv@v1.4.0 \
# --> Go-outline for extracting a JSON representation of the declarations in a Go source file
&& go get -v github.com/ramya-rao-a/go-outline \
&& rm -rf /go/src/ && rm -rf /go/pkg
RUN apt-get update \
# Install Docker CE CLI
&& apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common lsb-release \
&& curl -fsSL https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/gpg | apt-key add - 2>/dev/null \
&& add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/$(lsb_release -is | tr '[:upper:]' '[:lower:]') $(lsb_release -cs) stable" \
&& apt-get update \
&& apt-get install -y docker-ce-cli
# Verify git, process tools installed
RUN apt-get -y install git procps wget nano zsh inotify-tools jq
RUN wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | zsh || true
COPY ./Makefile ./
RUN mkdir -p /go/src/github.com/xinsnake/databricks-sdk-golang
ENV SHELL /bin/bash

@@ -1,48 +1,48 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{
"name": "Go",
"dockerComposeFile": "docker-compose.yml",
"service": "docker-in-docker",
"workspaceFolder": "/go/src/github.com/xinsnake/databricks-sdk-golang",
"postCreateCommand": "",
"shutdownAction": "stopCompose",
"extensions": [
"ms-azuretools.vscode-docker",
"ms-vscode.go"
],
"settings": {
"terminal.integrated.shell.linux": "zsh",
"go.gopath": "/go",
"go.useLanguageServer": true,
"[go]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
// Optional: Disable snippets, as they conflict with completion ranking.
"editor.snippetSuggestions": "none",
},
"[go.mod]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
},
"gopls": {
"usePlaceholders": true, // add parameter placeholders when completing a function
// Experimental settings
"completeUnimported": true, // autocomplete unimported packages
"deepCompletion": true // enable deep completion
},
"go.toolsEnvVars": {
"GO111MODULE": "on"
},
"go.lintTool": "golangci-lint",
"go.lintFlags": [
"--fast"
],
"remote.extensionKind": {
"ms-azuretools.vscode-docker": "workspace"
}
}
}

@@ -1,21 +1,21 @@
version: '3'
services:
docker-in-docker:
build:
context: ../
dockerfile: .devcontainer/Dockerfile
network_mode: "host"
volumes:
# Update this to wherever you want VS Code to mount the folder of your project
- ..:/go/src/github.com/xinsnake/databricks-sdk-golang
# This lets you avoid setting up Git again in the container
- ~/.gitconfig:/root/.gitconfig
- ~/.ssh:/root/.ssh:ro # does not work on Windows! Will need to generate in container :(
# Forwarding the socket is optional, but lets docker work inside the container if you install the Docker CLI.
# See the docker-in-docker-compose definition for details on how to install it.
- /var/run/docker.sock:/var/run/docker.sock
# Overrides default command so things don't shut down after the process ends - useful for debugging
command: sleep infinity

@@ -1,3 +1,3 @@
{
"go.inferGopath": false
}

LICENSE

@@ -1,13 +1,13 @@
Copyright 2019 Xinyun Zhou
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -1,14 +1,14 @@
all: checks test
checks:
go build all
golangci-lint run
test: checks
go test ./...
fmt:
find . -name '*.go' | grep -v vendor | xargs gofmt -s -w
deepcopy:
./cmd/deepcopy-gen -i ./,./aws/...,./azure/... -h ./hack/boilerplate.go.txt -v 3

@@ -1,47 +1,47 @@
# databricks-sdk-golang
This is a Golang SDK for the [Databricks REST API 2.0](https://docs.databricks.com/api/latest/index.html#) and the [Azure Databricks REST API 2.0](https://docs.azuredatabricks.net/api/latest/index.html).
**WARNING: The SDK is unstable and under development. More testing is needed!**
## Usage
```go
import (
	"os"

	databricks "github.com/xinsnake/databricks-sdk-golang"
	dbAzure "github.com/xinsnake/databricks-sdk-golang/azure"
	// dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)

var o databricks.DBClientOption
o.Host = os.Getenv("DATABRICKS_HOST")
o.Token = os.Getenv("DATABRICKS_TOKEN")

var c dbAzure.DBClient
c.Init(o)

jobs, err := c.Jobs().List()
```
## Implementation Progress
Everything except the SCIM, Instance Pools, and MLflow APIs is implemented. Refer to the progress table below:
| API | AWS | Azure |
| :--- | :---: | :---: |
| Clusters API | ✔ | ✔ |
| DBFS API | ✔ | ✔ |
| Groups API | ✔ | ✔ |
| Instance Pools API (preview) | ✗ | ✗ |
| Instance Profiles API | ✔ | N/A |
| Jobs API | ✔ | ✔ |
| Libraries API | ✔ | ✔ |
| MLflow API | ✗ | ✗ |
| SCIM API (preview) | ✗ | ✗ |
| Secrets API | ✔ | ✔ |
| Token API | ✔ | ✔ |
| Workspace API | ✔ | ✔ |
## Notes
- [Deepcopy](https://godoc.org/k8s.io/gengo/examples/deepcopy-gen) is generated should you need it (see the sketch below).
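For illustration, a minimal sketch of the generated deep-copy helpers in use (the function name and field values here are hypothetical; it assumes the generated methods in the `aws` package as shipped in this commit):
```go
package example

import (
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)

// cloneEvents demonstrates the generated DeepCopy: mutating the copy
// leaves the original response untouched.
func cloneEvents(orig *dbAws.ClustersEventsResponse) *dbAws.ClustersEventsResponse {
	dup := orig.DeepCopy() // nil-safe: returns nil when orig is nil
	if dup != nil {
		dup.TotalCount = 0 // orig.TotalCount is unchanged
	}
	return dup
}
```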

@@ -1,79 +1,79 @@
package aws
import databricks "github.com/xinsnake/databricks-sdk-golang"
// DBClient is the AWS implementation of the Databricks client
type DBClient struct {
Option databricks.DBClientOption
}
// Init initializes the client
func (c *DBClient) Init(option databricks.DBClientOption) DBClient {
c.Option = option
option.Init()
return *c
}
// Clusters returns an instance of ClustersAPI
func (c DBClient) Clusters() ClustersAPI {
var clustersAPI ClustersAPI
return clustersAPI.init(c)
}
// Dbfs returns an instance of DbfsAPI
func (c DBClient) Dbfs() DbfsAPI {
var dbfsAPI DbfsAPI
return dbfsAPI.init(c)
}
// Groups returns an instance of GroupsAPI
func (c DBClient) Groups() GroupsAPI {
var groupsAPI GroupsAPI
return groupsAPI.init(c)
}
// InstanceProfiles returns an instance of InstanceProfilesAPI
func (c DBClient) InstanceProfiles() InstanceProfilesAPI {
var instanceProfilesAPI InstanceProfilesAPI
return instanceProfilesAPI.init(c)
}
// Jobs returns an instance of JobsAPI
func (c DBClient) Jobs() JobsAPI {
var jobsAPI JobsAPI
return jobsAPI.init(c)
}
// Libraries returns an instance of LibrariesAPI
func (c DBClient) Libraries() LibrariesAPI {
var libraries LibrariesAPI
return libraries.init(c)
}
// Scim returns an instance of ScimAPI
func (c DBClient) Scim() ScimAPI {
var scimAPI ScimAPI
return scimAPI.init(c)
}
// Secrets returns an instance of SecretsAPI
func (c DBClient) Secrets() SecretsAPI {
var secretsAPI SecretsAPI
return secretsAPI.init(c)
}
// Token returns an instance of TokenAPI
func (c DBClient) Token() TokenAPI {
var tokenAPI TokenAPI
return tokenAPI.init(c)
}
// Workspace returns an instance of WorkspaceAPI
func (c DBClient) Workspace() WorkspaceAPI {
var workspaceAPI WorkspaceAPI
return workspaceAPI.init(c)
}
func (c *DBClient) performQuery(method, path string, data interface{}, headers map[string]string) ([]byte, error) {
return databricks.PerformQuery(c.Option, method, path, data, headers)
}
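For reference, a minimal sketch of wiring up the AWS client and chaining one of the accessors above (it mirrors the README usage; `DATABRICKS_HOST` and `DATABRICKS_TOKEN` are assumed to be set in the environment):
```go
package main

import (
	"fmt"
	"os"

	databricks "github.com/xinsnake/databricks-sdk-golang"
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)

func main() {
	var o databricks.DBClientOption
	o.Host = os.Getenv("DATABRICKS_HOST")
	o.Token = os.Getenv("DATABRICKS_TOKEN")

	var c dbAws.DBClient
	c.Init(o)

	// Each accessor returns a value-initialized API struct bound to c.
	clusters, err := c.Clusters().List()
	if err != nil {
		panic(err)
	}
	fmt.Printf("found %d clusters\n", len(clusters))
}
```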

@@ -1,250 +1,250 @@
package aws
import (
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// ClustersAPI exposes the Clusters API
type ClustersAPI struct {
Client DBClient
}
func (a ClustersAPI) init(client DBClient) ClustersAPI {
a.Client = client
return a
}
// Create creates a new Spark cluster
func (a ClustersAPI) Create(cluster models.NewCluster) (models.ClusterInfo, error) {
var clusterInfo models.ClusterInfo
resp, err := a.Client.performQuery(http.MethodPost, "/clusters/create", cluster, nil)
if err != nil {
return clusterInfo, err
}
err = json.Unmarshal(resp, &clusterInfo)
return clusterInfo, err
}
// Edit edits the configuration of a cluster to match the provided attributes and size
func (a ClustersAPI) Edit(clusterInfo models.ClusterInfo) error {
_, err := a.Client.performQuery(http.MethodPost, "/clusters/edit", clusterInfo, nil)
return err
}
// Start starts a terminated Spark cluster given its ID
func (a ClustersAPI) Start(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/start", data, nil)
return err
}
// Restart restarts a Spark cluster given its ID. If the cluster is not in a RUNNING state, nothing will happen.
func (a ClustersAPI) Restart(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/restart", data, nil)
return err
}
// Resize resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a RUNNING state.
func (a ClustersAPI) Resize(clusterID string, clusterSize models.ClusterSize) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
models.ClusterSize
}{
clusterID,
clusterSize,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/resize", data, nil)
return err
}
// Terminate terminates a Spark cluster given its ID
func (a ClustersAPI) Terminate(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/delete", data, nil)
return err
}
// Delete is an alias of Terminate
func (a ClustersAPI) Delete(clusterID string) error {
return a.Terminate(clusterID)
}
// PermanentDelete permanently deletes a cluster
func (a ClustersAPI) PermanentDelete(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/permanent-delete", data, nil)
return err
}
// Get retrieves the information for a cluster given its identifier
func (a ClustersAPI) Get(clusterID string) (models.ClusterInfo, error) {
var clusterInfo models.ClusterInfo
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/get-delete", data, nil)
if err != nil {
return clusterInfo, err
}
err = json.Unmarshal(resp, &clusterInfo)
return clusterInfo, err
}
// Pin ensures that an interactive cluster's configuration is retained even after the cluster has been terminated for more than 30 days
func (a ClustersAPI) Pin(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/pin", data, nil)
return err
}
// Unpin allows the cluster to eventually be removed from the list returned by the List API
func (a ClustersAPI) Unpin(clusterID string) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
_, err := a.Client.performQuery(http.MethodPost, "/clusters/unpin", data, nil)
return err
}
// List returns information about all pinned clusters, currently active clusters,
// up to 70 of the most recently terminated interactive clusters in the past 30 days,
// and up to 30 of the most recently terminated job clusters in the past 30 days
func (a ClustersAPI) List() ([]models.ClusterInfo, error) {
var clusterList = struct {
Clusters []models.ClusterInfo `json:"clusters,omitempty" url:"clusters,omitempty"`
}{}
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list", nil, nil)
if err != nil {
return clusterList.Clusters, err
}
err = json.Unmarshal(resp, &clusterList)
return clusterList.Clusters, err
}
// ListNodeTypes returns a list of supported Spark node types
func (a ClustersAPI) ListNodeTypes() ([]models.NodeType, error) {
var nodeTypeList = struct {
NodeTypes []models.NodeType `json:"node_types,omitempty" url:"node_types,omitempty"`
}{}
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-node-types", nil, nil)
if err != nil {
return nodeTypeList.NodeTypes, err
}
err = json.Unmarshal(resp, &nodeTypeList)
return nodeTypeList.NodeTypes, err
}
// SparkVersions returns the list of available Spark versions
func (a ClustersAPI) SparkVersions() ([]models.SparkVersion, error) {
var versionsList = struct {
Versions []models.SparkVersion `json:"versions,omitempty" url:"versions,omitempty"`
}{}
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/spark-versions", nil, nil)
if err != nil {
return versionsList.Versions, err
}
err = json.Unmarshal(resp, &versionsList)
return versionsList.Versions, err
}
// ClustersListZonesResponse is the response from ListZones
type ClustersListZonesResponse struct {
Zones []string `json:"zones,omitempty" url:"zones,omitempty"`
DefaultZone string `json:"default_zone,omitempty" url:"default_zone,omitempty"`
}
// ListZones returns a list of availability zones in which clusters can be created (e.g. us-west-2a)
func (a ClustersAPI) ListZones() (ClustersListZonesResponse, error) {
var zonesList ClustersListZonesResponse
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/list-zones", nil, nil)
if err != nil {
return zonesList, err
}
err = json.Unmarshal(resp, &zonesList)
return zonesList, err
}
// ClustersEventsResponse is the response from Events
type ClustersEventsResponse struct {
Events []models.ClusterEvent `json:"events,omitempty" url:"events,omitempty"`
NextPage struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"`
Offset int32 `json:"offset,omitempty" url:"offset,omitempty"`
} `json:"next_page,omitempty" url:"next_page,omitempty"`
TotalCount int32 `json:"total_count,omitempty" url:"total_count,omitempty"`
}
// Events retrieves a list of events about the activity of a cluster
func (a ClustersAPI) Events(
clusterID string, startTime, endTime int64, order models.ListOrder,
eventTypes []models.ClusterEventType, offset, limit int64) (ClustersEventsResponse, error) {
var eventsResponse ClustersEventsResponse
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
EndTime int64 `json:"end_time,omitempty" url:"end_time,omitempty"`
Order models.ListOrder `json:"order,omitempty" url:"order,omitempty"`
EventTypes []models.ClusterEventType `json:"event_types,omitempty" url:"event_types,omitempty"`
Offset int64 `json:"offset,omitempty" url:"offset,omitempty"`
Limit int64 `json:"limit,omitempty" url:"limit,omitempty"`
}{
clusterID,
startTime,
endTime,
order,
eventTypes,
offset,
limit,
}
resp, err := a.Client.performQuery(http.MethodPost, "/clusters/events", data, nil)
if err != nil {
return eventsResponse, err
}
err = json.Unmarshal(resp, &eventsResponse)
return eventsResponse, err
}
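As a usage sketch, Events can be drained by advancing the offset until the reported total_count is reached (the page size of 50 and the error handling are arbitrary choices; real code could equally follow the NextPage cursor in the response):
```go
package example

import (
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)

// allEvents pages through /clusters/events for one cluster.
func allEvents(c dbAws.DBClient, clusterID string) ([]models.ClusterEvent, error) {
	var (
		all    []models.ClusterEvent
		order  models.ListOrder // zero value: let the server use its default order
		offset int64
	)
	for {
		page, err := c.Clusters().Events(clusterID, 0, 0, order, nil, offset, 50)
		if err != nil {
			return nil, err
		}
		all = append(all, page.Events...)
		offset += int64(len(page.Events))
		if len(page.Events) == 0 || offset >= int64(page.TotalCount) {
			return all, nil
		}
	}
}
```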

@@ -1,204 +1,204 @@
package aws
import (
"encoding/base64"
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// DbfsAPI exposes the DBFS API
type DbfsAPI struct {
Client DBClient
}
func (a DbfsAPI) init(client DBClient) DbfsAPI {
a.Client = client
return a
}
// AddBlock appends a block of data to the stream specified by the input handle
func (a DbfsAPI) AddBlock(handle int64, data []byte) error {
data2 := struct {
Handle int64 `json:"handle,omitempty" url:"handle,omitempty"`
Data string `json:"data,omitempty" url:"data,omitempty"`
}{
handle,
base64.StdEncoding.EncodeToString(data),
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/add-block", data2, nil)
return err
}
// Close closes the stream specified by the input handle
func (a DbfsAPI) Close(handle int64) error {
data := struct {
Handle int64 `json:"handle,omitempty" url:"handle,omitempty"`
}{
handle,
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/close", data, nil)
return err
}
// DbfsCreateResponse is the response from Create
type DbfsCreateResponse struct {
Handle int64 `json:"handle,omitempty" url:"handle,omitempty"`
}
// Create opens a stream to write to a file and returns a handle to this stream
func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error) {
var createResponse DbfsCreateResponse
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"`
}{
path,
overwrite,
}
resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil)
if err != nil {
return createResponse, err
}
err = json.Unmarshal(resp, &createResponse)
return createResponse, err
}
// Delete deletes the file or directory (optionally deleting all files in the directory recursively)
func (a DbfsAPI) Delete(path string, recursive bool) error {
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
Recursive bool `json:"recursive,omitempty" url:"recursive,omitempty"`
}{
path,
recursive,
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/delete", data, nil)
return err
}
// GetStatus gets the file information of a file or directory
func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) {
var fileInfo models.FileInfo
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
}{
path,
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil)
if err != nil {
return fileInfo, err
}
err = json.Unmarshal(resp, &fileInfo)
return fileInfo, err
}
// DbfsListResponse is a list of FileInfo as a response of List
type DbfsListResponse struct {
Files []models.FileInfo `json:"files,omitempty" url:"files,omitempty"`
}
// List lists the contents of a directory, or details of the file
func (a DbfsAPI) List(path string) ([]models.FileInfo, error) {
var listResponse DbfsListResponse
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
}{
path,
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil)
if err != nil {
return listResponse.Files, err
}
err = json.Unmarshal(resp, &listResponse)
return listResponse.Files, err
}
// Mkdirs creates the given directory and necessary parent directories if they do not exist
func (a DbfsAPI) Mkdirs(path string) error {
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
}{
path,
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/mkdirs", data, nil)
return err
}
// Move moves a file from one location to another location within DBFS
func (a DbfsAPI) Move(sourcePath, destinationPath string) error {
data := struct {
SourcePath string `json:"source_path,omitempty" url:"source_path,omitempty"`
DestinationPath string `json:"destination_path,omitempty" url:"destination_path,omitempty"`
}{
sourcePath,
destinationPath,
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/move", data, nil)
return err
}
// Put uploads a file in a single request; the contents are base64-encoded into the JSON body
func (a DbfsAPI) Put(path string, contents []byte, overwrite bool) error {
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
Contents string `json:"contents,omitempty" url:"contents,omitempty"`
Overwrite bool `json:"overwrite,omitempty" url:"overwrite,omitempty"`
}{
path,
base64.StdEncoding.EncodeToString(contents),
overwrite,
}
_, err := a.Client.performQuery(http.MethodPost, "/dbfs/put", data, nil)
return err
}
// DbfsReadResponse is the response of reading a file
type DbfsReadResponse struct {
BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"`
Data []byte `json:"data,omitempty" url:"data,omitempty"`
}
// Read returns the contents of a file
func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, error) {
var readResponseBase64 struct {
BytesRead int64 `json:"bytes_read,omitempty" url:"bytes_read,omitempty"`
Data string `json:"data,omitempty" url:"data,omitempty"`
}
var readResponse DbfsReadResponse
data := struct {
Path string `json:"path,omitempty" url:"path,omitempty"`
Offset int64 `json:"offset,omitempty" url:"offset,omitempty"`
Length int64 `json:"length,omitempty" url:"length,omitempty"`
}{
path,
offset,
length,
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil)
if err != nil {
return readResponse, err
}
err = json.Unmarshal(resp, &readResponseBase64)
if err != nil {
return readResponse, err
}
readResponse.BytesRead = readResponseBase64.BytesRead
readResponse.Data, err = base64.StdEncoding.DecodeString(readResponseBase64.Data)
return readResponse, err
}
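For larger files, the streaming Create / AddBlock / Close flow above composes as follows (a sketch; the 1 MB chunk size assumes the DBFS API's documented per-block limit):
```go
package example

import (
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)

// uploadStream writes contents to a DBFS path in 1 MB blocks.
func uploadStream(c dbAws.DBClient, path string, contents []byte) error {
	created, err := c.Dbfs().Create(path, true) // overwrite any existing file
	if err != nil {
		return err
	}
	const chunk = 1 << 20 // 1 MB of raw bytes per AddBlock call
	for off := 0; off < len(contents); off += chunk {
		end := off + chunk
		if end > len(contents) {
			end = len(contents)
		}
		if err := c.Dbfs().AddBlock(created.Handle, contents[off:end]); err != nil {
			return err
		}
	}
	return c.Dbfs().Close(created.Handle)
}
```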

@@ -1,396 +1,396 @@
// +build !ignore_autogenerated
// Code generated by deepcopy-gen. DO NOT EDIT.
package aws
import (
models "github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClustersAPI) DeepCopyInto(out *ClustersAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersAPI.
func (in *ClustersAPI) DeepCopy() *ClustersAPI {
if in == nil {
return nil
}
out := new(ClustersAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClustersEventsResponse) DeepCopyInto(out *ClustersEventsResponse) {
*out = *in
if in.Events != nil {
in, out := &in.Events, &out.Events
*out = make([]models.ClusterEvent, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
out.NextPage = in.NextPage
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersEventsResponse.
func (in *ClustersEventsResponse) DeepCopy() *ClustersEventsResponse {
if in == nil {
return nil
}
out := new(ClustersEventsResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ClustersListZonesResponse) DeepCopyInto(out *ClustersListZonesResponse) {
*out = *in
if in.Zones != nil {
in, out := &in.Zones, &out.Zones
*out = make([]string, len(*in))
copy(*out, *in)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClustersListZonesResponse.
func (in *ClustersListZonesResponse) DeepCopy() *ClustersListZonesResponse {
if in == nil {
return nil
}
out := new(ClustersListZonesResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DBClient) DeepCopyInto(out *DBClient) {
*out = *in
in.Option.DeepCopyInto(&out.Option)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DBClient.
func (in *DBClient) DeepCopy() *DBClient {
if in == nil {
return nil
}
out := new(DBClient)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DbfsAPI) DeepCopyInto(out *DbfsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsAPI.
func (in *DbfsAPI) DeepCopy() *DbfsAPI {
if in == nil {
return nil
}
out := new(DbfsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DbfsCreateResponse) DeepCopyInto(out *DbfsCreateResponse) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsCreateResponse.
func (in *DbfsCreateResponse) DeepCopy() *DbfsCreateResponse {
if in == nil {
return nil
}
out := new(DbfsCreateResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DbfsListResponse) DeepCopyInto(out *DbfsListResponse) {
*out = *in
if in.Files != nil {
in, out := &in.Files, &out.Files
*out = make([]models.FileInfo, len(*in))
copy(*out, *in)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsListResponse.
func (in *DbfsListResponse) DeepCopy() *DbfsListResponse {
if in == nil {
return nil
}
out := new(DbfsListResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DbfsReadResponse) DeepCopyInto(out *DbfsReadResponse) {
*out = *in
if in.Data != nil {
in, out := &in.Data, &out.Data
*out = make([]byte, len(*in))
copy(*out, *in)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DbfsReadResponse.
func (in *DbfsReadResponse) DeepCopy() *DbfsReadResponse {
if in == nil {
return nil
}
out := new(DbfsReadResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GroupsAPI) DeepCopyInto(out *GroupsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsAPI.
func (in *GroupsAPI) DeepCopy() *GroupsAPI {
if in == nil {
return nil
}
out := new(GroupsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GroupsCreateResponse) DeepCopyInto(out *GroupsCreateResponse) {
*out = *in
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupsCreateResponse.
func (in *GroupsCreateResponse) DeepCopy() *GroupsCreateResponse {
if in == nil {
return nil
}
out := new(GroupsCreateResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *InstanceProfilesAPI) DeepCopyInto(out *InstanceProfilesAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceProfilesAPI.
func (in *InstanceProfilesAPI) DeepCopy() *InstanceProfilesAPI {
if in == nil {
return nil
}
out := new(InstanceProfilesAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsAPI) DeepCopyInto(out *JobsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsAPI.
func (in *JobsAPI) DeepCopy() *JobsAPI {
if in == nil {
return nil
}
out := new(JobsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsRunsGetOutputResponse) DeepCopyInto(out *JobsRunsGetOutputResponse) {
*out = *in
out.NotebookOutput = in.NotebookOutput
in.Metadata.DeepCopyInto(&out.Metadata)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsGetOutputResponse.
func (in *JobsRunsGetOutputResponse) DeepCopy() *JobsRunsGetOutputResponse {
if in == nil {
return nil
}
out := new(JobsRunsGetOutputResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsRunsListResponse) DeepCopyInto(out *JobsRunsListResponse) {
*out = *in
if in.Runs != nil {
in, out := &in.Runs, &out.Runs
*out = make([]models.Run, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsListResponse.
func (in *JobsRunsListResponse) DeepCopy() *JobsRunsListResponse {
if in == nil {
return nil
}
out := new(JobsRunsListResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LibrariesAPI) DeepCopyInto(out *LibrariesAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesAPI.
func (in *LibrariesAPI) DeepCopy() *LibrariesAPI {
if in == nil {
return nil
}
out := new(LibrariesAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LibrariesClusterStatusResponse) DeepCopyInto(out *LibrariesClusterStatusResponse) {
*out = *in
if in.LibraryStatuses != nil {
in, out := &in.LibraryStatuses, &out.LibraryStatuses
*out = make([]models.LibraryFullStatus, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesClusterStatusResponse.
func (in *LibrariesClusterStatusResponse) DeepCopy() *LibrariesClusterStatusResponse {
if in == nil {
return nil
}
out := new(LibrariesClusterStatusResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ScimAPI) DeepCopyInto(out *ScimAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScimAPI.
func (in *ScimAPI) DeepCopy() *ScimAPI {
if in == nil {
return nil
}
out := new(ScimAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SecretsAPI) DeepCopyInto(out *SecretsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretsAPI.
func (in *SecretsAPI) DeepCopy() *SecretsAPI {
if in == nil {
return nil
}
out := new(SecretsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TokenAPI) DeepCopyInto(out *TokenAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenAPI.
func (in *TokenAPI) DeepCopy() *TokenAPI {
if in == nil {
return nil
}
out := new(TokenAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TokenCreateResponse) DeepCopyInto(out *TokenCreateResponse) {
*out = *in
out.TokenInfo = in.TokenInfo
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenCreateResponse.
func (in *TokenCreateResponse) DeepCopy() *TokenCreateResponse {
if in == nil {
return nil
}
out := new(TokenCreateResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *WorkspaceAPI) DeepCopyInto(out *WorkspaceAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkspaceAPI.
func (in *WorkspaceAPI) DeepCopy() *WorkspaceAPI {
if in == nil {
return nil
}
out := new(WorkspaceAPI)
in.DeepCopyInto(out)
return out
}
func (in *InstanceProfilesAPI) DeepCopy() *InstanceProfilesAPI {
if in == nil {
return nil
}
out := new(InstanceProfilesAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsAPI) DeepCopyInto(out *JobsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsAPI.
func (in *JobsAPI) DeepCopy() *JobsAPI {
if in == nil {
return nil
}
out := new(JobsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsRunsGetOutputResponse) DeepCopyInto(out *JobsRunsGetOutputResponse) {
*out = *in
out.NotebookOutput = in.NotebookOutput
in.Metadata.DeepCopyInto(&out.Metadata)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsGetOutputResponse.
func (in *JobsRunsGetOutputResponse) DeepCopy() *JobsRunsGetOutputResponse {
if in == nil {
return nil
}
out := new(JobsRunsGetOutputResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *JobsRunsListResponse) DeepCopyInto(out *JobsRunsListResponse) {
*out = *in
if in.Runs != nil {
in, out := &in.Runs, &out.Runs
*out = make([]models.Run, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsRunsListResponse.
func (in *JobsRunsListResponse) DeepCopy() *JobsRunsListResponse {
if in == nil {
return nil
}
out := new(JobsRunsListResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LibrariesAPI) DeepCopyInto(out *LibrariesAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesAPI.
func (in *LibrariesAPI) DeepCopy() *LibrariesAPI {
if in == nil {
return nil
}
out := new(LibrariesAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *LibrariesClusterStatusResponse) DeepCopyInto(out *LibrariesClusterStatusResponse) {
*out = *in
if in.LibraryStatuses != nil {
in, out := &in.LibraryStatuses, &out.LibraryStatuses
*out = make([]models.LibraryFullStatus, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LibrariesClusterStatusResponse.
func (in *LibrariesClusterStatusResponse) DeepCopy() *LibrariesClusterStatusResponse {
if in == nil {
return nil
}
out := new(LibrariesClusterStatusResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ScimAPI) DeepCopyInto(out *ScimAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScimAPI.
func (in *ScimAPI) DeepCopy() *ScimAPI {
if in == nil {
return nil
}
out := new(ScimAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *SecretsAPI) DeepCopyInto(out *SecretsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretsAPI.
func (in *SecretsAPI) DeepCopy() *SecretsAPI {
if in == nil {
return nil
}
out := new(SecretsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TokenAPI) DeepCopyInto(out *TokenAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenAPI.
func (in *TokenAPI) DeepCopy() *TokenAPI {
if in == nil {
return nil
}
out := new(TokenAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TokenCreateResponse) DeepCopyInto(out *TokenCreateResponse) {
*out = *in
out.TokenInfo = in.TokenInfo
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TokenCreateResponse.
func (in *TokenCreateResponse) DeepCopy() *TokenCreateResponse {
if in == nil {
return nil
}
out := new(TokenCreateResponse)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *WorkspaceAPI) DeepCopyInto(out *WorkspaceAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkspaceAPI.
func (in *WorkspaceAPI) DeepCopy() *WorkspaceAPI {
if in == nil {
return nil
}
out := new(WorkspaceAPI)
in.DeepCopyInto(out)
return out
}
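The generated DeepCopy helpers clone nested slices as well as scalar fields. A minimal sketch, assuming only the ClustersListZonesResponse type and its Zones field shown above:
package example
import (
	"fmt"
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)
// DemoDeepCopy shows that DeepCopy duplicates the backing Zones slice,
// so mutating the clone leaves the original response untouched.
func DemoDeepCopy() {
	orig := &dbAws.ClustersListZonesResponse{Zones: []string{"us-east-1a", "us-east-1b"}}
	clone := orig.DeepCopy()
	clone.Zones[0] = "us-west-2a"
	fmt.Println(orig.Zones[0], clone.Zones[0]) // prints: us-east-1a us-west-2a
}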

View File

@ -1,2 +1,2 @@
// +k8s:deepcopy-gen=package
package aws

View File

@ -1,139 +1,139 @@
package aws
import (
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// GroupsAPI exposes the Groups API
type GroupsAPI struct {
Client DBClient
}
func (a GroupsAPI) init(client DBClient) GroupsAPI {
a.Client = client
return a
}
// AddMember adds a user or group to a group
func (a GroupsAPI) AddMember(principalName models.PrincipalName, parentName string) error {
data := struct {
UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
}{
principalName.UserName,
principalName.GroupName,
parentName,
}
_, err := a.Client.performQuery(http.MethodPost, "/groups/add-member", data, nil)
return err
}
// GroupsCreateResponse is a response with group name for Create
type GroupsCreateResponse struct {
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}
// Create creates a new group with the given name
func (a GroupsAPI) Create(groupName string) (GroupsCreateResponse, error) {
var createResponse GroupsCreateResponse
data := struct {
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}{
groupName,
}
resp, err := a.Client.performQuery(http.MethodPost, "/groups/create", data, nil)
if err != nil {
return createResponse, err
}
err = json.Unmarshal(resp, &createResponse)
return createResponse, err
}
// ListMembers returns all of the members of a particular group
func (a GroupsAPI) ListMembers(groupName string) ([]models.PrincipalName, error) {
var membersResponse struct {
Members []models.PrincipalName `json:"members,omitempty" url:"members,omitempty"`
}
data := struct {
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}{
groupName,
}
resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-members", data, nil)
if err != nil {
return membersResponse.Members, err
}
err = json.Unmarshal(resp, &membersResponse)
return membersResponse.Members, err
}
// List returns all of the groups in an organization
func (a GroupsAPI) List() ([]string, error) {
var listResponse struct {
GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
}
resp, err := a.Client.performQuery(http.MethodGet, "/groups/list", nil, nil)
if err != nil {
return listResponse.GroupNames, err
}
err = json.Unmarshal(resp, &listResponse)
return listResponse.GroupNames, err
}
// ListParents retrieves all groups in which a given user or group is a member
func (a GroupsAPI) ListParents(principalName models.PrincipalName) ([]string, error) {
var listParentsResponse struct {
GroupNames []string `json:"group_names,omitempty" url:"group_names,omitempty"`
}
data := struct {
UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}{
principalName.UserName,
principalName.GroupName,
}
resp, err := a.Client.performQuery(http.MethodGet, "/groups/list-parents", data, nil)
if err != nil {
return listParentsResponse.GroupNames, err
}
err = json.Unmarshal(resp, &listParentsResponse)
return listParentsResponse.GroupNames, err
}
// RemoveMember removes a user or group from a group
func (a GroupsAPI) RemoveMember(principalName models.PrincipalName, parentName string) error {
data := struct {
UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
ParentName string `json:"parent_name,omitempty" url:"parent_name,omitempty"`
}{
principalName.UserName,
principalName.GroupName,
parentName,
}
_, err := a.Client.performQuery(http.MethodPost, "/groups/remove-member", data, nil)
return err
}
// Delete removes a group from this organization
func (a GroupsAPI) Delete(groupName string) error {
data := struct {
GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}{
groupName,
}
_, err := a.Client.performQuery(http.MethodPost, "/groups/delete", data, nil)
return err
}
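A usage sketch for the Groups API above. It assumes a GroupsAPI value already wired to an authenticated DBClient (client construction is outside this file) and that models.PrincipalName exposes the UserName field implied by AddMember:
package example
import (
	"log"
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// addAnalyst creates a group, adds one user to it, and logs the member count.
func addAnalyst(groups dbAws.GroupsAPI) error {
	created, err := groups.Create("analysts")
	if err != nil {
		return err
	}
	// PrincipalName identifies either a user or a group; here a user.
	principal := models.PrincipalName{UserName: "jane@example.com"}
	if err := groups.AddMember(principal, created.GroupName); err != nil {
		return err
	}
	members, err := groups.ListMembers(created.GroupName)
	if err != nil {
		return err
	}
	log.Printf("group %q now has %d member(s)", created.GroupName, len(members))
	return nil
}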

View File

@ -1,57 +1,57 @@
package aws
import (
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// InstanceProfilesAPI exposes the Instance Profiles API
type InstanceProfilesAPI struct {
Client DBClient
}
func (a InstanceProfilesAPI) init(client DBClient) InstanceProfilesAPI {
a.Client = client
return a
}
// Add registers an instance profile in Databricks
func (a InstanceProfilesAPI) Add(instanceProfileArn string, skipValidation bool) error {
data := struct {
InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"`
SkipValidation bool `json:"skip_validation,omitempty" url:"skip_validation,omitempty"`
}{
instanceProfileArn,
skipValidation,
}
_, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/add", data, nil)
return err
}
// List lists the instance profiles that the calling user can use to launch a cluster
func (a InstanceProfilesAPI) List() ([]models.InstanceProfile, error) {
var listResponse struct {
InstanceProfiles []models.InstanceProfile `json:"instance_profiles,omitempty" url:"instance_profiles,omitempty"`
}
resp, err := a.Client.performQuery(http.MethodGet, "/instance-profiles/list", nil, nil)
if err != nil {
return listResponse.InstanceProfiles, err
}
err = json.Unmarshal(resp, &listResponse)
return listResponse.InstanceProfiles, err
}
// Remove removes the instance profile with the provided ARN
func (a InstanceProfilesAPI) Remove(instanceProfileArn string) error {
data := struct {
InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"`
}{
instanceProfileArn,
}
_, err := a.Client.performQuery(http.MethodPost, "/instance-profiles/remove", data, nil)
return err
}
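A sketch of registering and listing instance profiles with the API above, again assuming an InstanceProfilesAPI wired to an authenticated DBClient:
package example
import (
	"fmt"
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)
// registerProfile adds an AWS instance profile ARN and confirms it is listed.
func registerProfile(api dbAws.InstanceProfilesAPI, arn string) error {
	// skipValidation=false asks Databricks to verify the profile is usable.
	if err := api.Add(arn, false); err != nil {
		return err
	}
	profiles, err := api.List()
	if err != nil {
		return err
	}
	fmt.Printf("workspace now has %d instance profile(s)\n", len(profiles))
	return nil
}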

View File

@ -1,249 +1,249 @@
package aws
import (
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// JobsAPI exposes Jobs API endpoints
type JobsAPI struct {
Client DBClient
}
func (a JobsAPI) init(client DBClient) JobsAPI {
a.Client = client
return a
}
// Create creates a new job
func (a JobsAPI) Create(jobSettings models.JobSettings) (models.Job, error) {
var job models.Job
resp, err := a.Client.performQuery(http.MethodPost, "/jobs/create", jobSettings, nil)
if err != nil {
return job, err
}
err = json.Unmarshal(resp, &job)
return job, err
}
// List lists all jobs
func (a JobsAPI) List() ([]models.Job, error) {
var jobsList = struct {
Jobs []models.Job `json:"jobs,omitempty" url:"jobs,omitempty"`
}{}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil)
if err != nil {
return jobsList.Jobs, err
}
err = json.Unmarshal(resp, &jobsList)
return jobsList.Jobs, err
}
// Delete deletes a job by ID
func (a JobsAPI) Delete(jobID int64) error {
data := struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
}{
jobID,
}
_, err := a.Client.performQuery(http.MethodPost, "/jobs/delete", data, nil)
return err
}
// Get gets a job by ID
func (a JobsAPI) Get(jobID int64) (models.Job, error) {
var job models.Job
data := struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
}{
jobID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/get", data, nil)
if err != nil {
return job, err
}
err = json.Unmarshal(resp, &job)
return job, err
}
// Reset overwrites job settings
func (a JobsAPI) Reset(jobID int64, jobSettings models.JobSettings) error {
data := struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
NewSettings models.JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
}{
jobID,
jobSettings,
}
_, err := a.Client.performQuery(http.MethodPost, "/jobs/reset", data, nil)
return err
}
// RunNow runs a job now and returns the run_id of the triggered run
func (a JobsAPI) RunNow(jobID int64, runParameters models.RunParameters) (models.Run, error) {
var run models.Run
data := struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
models.RunParameters
}{
jobID,
runParameters,
}
resp, err := a.Client.performQuery(http.MethodPost, "/jobs/run-now", data, nil)
if err != nil {
return run, err
}
err = json.Unmarshal(resp, &run)
return run, err
}
// RunsSubmit submits a one-time run
func (a JobsAPI) RunsSubmit(runName string, clusterSpec models.ClusterSpec, jobTask models.JobTask, timeoutSeconds int32) (models.Run, error) {
var run models.Run
data := struct {
RunName string `json:"run_name,omitempty" url:"run_name,omitempty"`
models.ClusterSpec
models.JobTask
TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
}{
runName,
clusterSpec,
jobTask,
timeoutSeconds,
}
resp, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/submit", data, nil)
if err != nil {
return run, err
}
err = json.Unmarshal(resp, &run)
return run, err
}
// JobsRunsListResponse is a bit special because it has a HasMore field
type JobsRunsListResponse struct {
Runs []models.Run `json:"runs,omitempty" url:"runs,omitempty"`
HasMore bool `json:"has_more,omitempty" url:"has_more,omitempty"`
}
// RunsList lists runs from most recently started to least recently started
func (a JobsAPI) RunsList(activeOnly, completedOnly bool, jobID int64, offset, limit int32) (JobsRunsListResponse, error) {
var runlistResponse JobsRunsListResponse
data := struct {
ActiveOnly bool `json:"active_only,omitempty" url:"active_only,omitempty"`
CompletedOnly bool `json:"completed_only,omitempty" url:"completed_only,omitempty"`
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
Offset int32 `json:"offset,omitempty" url:"offset,omitempty"`
Limit int32 `json:"limit,omitempty" url:"limit,omitempty"`
}{
activeOnly,
completedOnly,
jobID,
offset,
limit,
}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/list", data, nil)
if err != nil {
return runlistResponse, err
}
err = json.Unmarshal(resp, &runlistResponse)
return runlistResponse, err
}
// RunsGet retrieves the metadata of a run
func (a JobsAPI) RunsGet(runID int64) (models.Run, error) {
var run models.Run
data := struct {
RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}{
runID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get", data, nil)
if err != nil {
return run, err
}
err = json.Unmarshal(resp, &run)
return run, err
}
// RunsExport exports and retrieves the job run task
func (a JobsAPI) RunsExport(runID int64) ([]models.ViewItem, error) {
var viewItemsView = struct {
Views []models.ViewItem `json:"views,omitempty" url:"views,omitempty"`
}{}
data := struct {
RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}{
runID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/export", data, nil)
if err != nil {
return viewItemsView.Views, err
}
err = json.Unmarshal(resp, &viewItemsView)
return viewItemsView.Views, err
}
// RunsCancel cancels a run
func (a JobsAPI) RunsCancel(runID int64) error {
data := struct {
RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}{
runID,
}
_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/cancel", data, nil)
return err
}
// JobsRunsGetOutputResponse is the output of the run
type JobsRunsGetOutputResponse struct {
NotebookOutput models.NotebookOutput `json:"notebook_output,omitempty" url:"notebook_output,omitempty"`
Error string `json:"error,omitempty" url:"error,omitempty"`
Metadata models.Run `json:"metadata,omitempty" url:"metadata,omitempty"`
}
// RunsGetOutput retrieves the output of a run
func (a JobsAPI) RunsGetOutput(runID int64) (JobsRunsGetOutputResponse, error) {
var runsGetOutputResponse JobsRunsGetOutputResponse
data := struct {
RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}{
runID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/runs/get-output", data, nil)
if err != nil {
return runsGetOutputResponse, err
}
err = json.Unmarshal(resp, &runsGetOutputResponse)
return runsGetOutputResponse, err
}
// RunsDelete deletes a non-active run. Returns an error if the run is active.
func (a JobsAPI) RunsDelete(runID int64) error {
data := struct {
RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
}{
runID,
}
_, err := a.Client.performQuery(http.MethodPost, "/jobs/runs/delete", data, nil)
return err
}
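Because JobsRunsListResponse carries a HasMore flag rather than a total count, callers page through runs by advancing the offset until HasMore is false. A sketch, assuming a JobsAPI wired to an authenticated DBClient:
package example
import (
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// allRuns collects every run of one job by paging /jobs/runs/list.
func allRuns(jobs dbAws.JobsAPI, jobID int64) ([]models.Run, error) {
	const pageSize = 25
	var runs []models.Run
	for offset := int32(0); ; offset += pageSize {
		page, err := jobs.RunsList(false, false, jobID, offset, pageSize)
		if err != nil {
			return nil, err
		}
		runs = append(runs, page.Runs...)
		if !page.HasMore {
			return runs, nil
		}
	}
}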

View File

@ -1,83 +1,83 @@
package aws
import (
"encoding/json"
"net/http"
"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// LibrariesAPI exposes the Libraries API
type LibrariesAPI struct {
Client DBClient
}
func (a LibrariesAPI) init(client DBClient) LibrariesAPI {
a.Client = client
return a
}
// AllClusterStatuses gets the status of all libraries on all clusters
func (a LibrariesAPI) AllClusterStatuses() ([]models.ClusterLibraryStatuses, error) {
var allClusterStatusesResponse struct {
Statuses []models.ClusterLibraryStatuses `json:"statuses,omitempty" url:"statuses,omitempty"`
}
resp, err := a.Client.performQuery(http.MethodGet, "/libraries/all-cluster-statuses", nil, nil)
if err != nil {
return allClusterStatusesResponse.Statuses, err
}
err = json.Unmarshal(resp, &allClusterStatusesResponse)
return allClusterStatusesResponse.Statuses, err
}
// LibrariesClusterStatusResponse is the response from ClusterStatus
type LibrariesClusterStatusResponse struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
LibraryStatuses []models.LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"`
}
// ClusterStatus gets the status of libraries on a cluster
func (a LibrariesAPI) ClusterStatus(clusterID string) (LibrariesClusterStatusResponse, error) {
var clusterStatusResponse LibrariesClusterStatusResponse
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
}{
clusterID,
}
resp, err := a.Client.performQuery(http.MethodGet, "/libraries/cluster-status", data, nil)
if err != nil {
return clusterStatusResponse, err
}
err = json.Unmarshal(resp, &clusterStatusResponse)
return clusterStatusResponse, err
}
// Install installs libraries on a cluster
func (a LibrariesAPI) Install(clusterID string, libraries []models.Library) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
}{
clusterID,
libraries,
}
_, err := a.Client.performQuery(http.MethodPost, "/libraries/install", data, nil)
return err
}
// Uninstall sets libraries to be uninstalled on a cluster
func (a LibrariesAPI) Uninstall(clusterID string, libraries []models.Library) error {
data := struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Libraries []models.Library `json:"libraries,omitempty" url:"libraries,omitempty"`
}{
clusterID,
libraries,
}
_, err := a.Client.performQuery(http.MethodPost, "/libraries/uninstall", data, nil)
return err
}
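A small sketch of reading per-cluster library state with the ClusterStatus call above; it uses only the LibrariesClusterStatusResponse fields defined in this file and assumes an authenticated client:
package example
import (
	"fmt"
	dbAws "github.com/xinsnake/databricks-sdk-golang/aws"
)
// reportLibraryStatus prints how many library status entries one cluster reports.
func reportLibraryStatus(libs dbAws.LibrariesAPI, clusterID string) error {
	status, err := libs.ClusterStatus(clusterID)
	if err != nil {
		return err
	}
	fmt.Printf("cluster %s reports %d library status entries\n",
		status.ClusterID, len(status.LibraryStatuses))
	return nil
}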

View File

@ -1,6 +1,6 @@
package models
type AclItem struct {
Principal string `json:"principal,omitempty" url:"principal,omitempty"`
Permission *AclPermission `json:"permission,omitempty" url:"permission,omitempty"`
}

View File

@ -1,9 +1,9 @@
package models
type AclPermission string
const (
AclPermissionRead = "READ"
AclPermissionWrite = "WRITE"
AclPermissionManage = "MANAGE"
)
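Since AclItem.Permission is a pointer and the permission levels above are string constants, building an item takes one intermediate typed value. A sketch:
package example
import "github.com/xinsnake/databricks-sdk-golang/aws/models"
// readOnlyACL grants READ to one principal; the untyped constant is first
// bound to an AclPermission variable so its address can be taken.
func readOnlyACL(principal string) models.AclItem {
	perm := models.AclPermission(models.AclPermissionRead)
	return models.AclItem{Principal: principal, Permission: &perm}
}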

View File

@ -1,6 +1,6 @@
package models
type AutoScale struct {
MinWorkers int32 `json:"min_workers,omitempty" url:"min_workers,omitempty"`
MaxWorkers int32 `json:"max_workers,omitempty" url:"max_workers,omitempty"`
}
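The omitempty tags mean zero-valued fields are dropped from the wire format entirely, which matters for payloads like AutoScale. A quick demonstration:
package example
import (
	"encoding/json"
	"fmt"
	"github.com/xinsnake/databricks-sdk-golang/aws/models"
)
// DemoAutoScaleJSON shows that a zero MinWorkers is omitted when marshaled.
func DemoAutoScaleJSON() {
	b, _ := json.Marshal(models.AutoScale{MaxWorkers: 8})
	fmt.Println(string(b)) // prints: {"max_workers":8}
}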

View File

@ -1,12 +1,12 @@
package models
type AwsAttributes struct {
FirstOnDemand int32 `json:"first_on_demand,omitempty" url:"first_on_demand,omitempty"`
Availability *AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"`
ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"`
SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"`
EbsVolumeType *EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"`
EbsVolumeCount int32 `json:"ebs_volume_count,omitempty" url:"ebs_volume_count,omitempty"`
EbsVolumeSize int32 `json:"ebs_volume_size,omitempty" url:"ebs_volume_size,omitempty"`
}

View File

@ -1,9 +1,9 @@
package models
type AwsAvailability string
const (
AwsAvailabilitySpot = "SPOT"
AwsAvailabilityOnDemand = "ON_DEMAND"
AwsAvailabilitySpotWithFallback = "SPOT_WITH_FALLBACK"
)

View File

@ -1,18 +1,18 @@
package models
type ClusterAttributes struct {
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"`
CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
ClusterSource *ClusterSource `json:"cluster_source,omitempty" url:"cluster_source,omitempty"`
}

View File

@ -1,7 +1,7 @@
package models
type ClusterCloudProviderNodeInfo struct {
Status *ClusterCloudProviderNodeStatus `json:"status,omitempty" url:"status,omitempty"`
AvailableCoreQuota int32 `json:"available_core_quota,omitempty" url:"available_core_quota,omitempty"`
TotalCoreQuota int32 `json:"total_core_quota,omitempty" url:"total_core_quota,omitempty"`
}

View File

@ -1,8 +1,8 @@
package models
type ClusterCloudProviderNodeStatus string
const (
ClusterCloudProviderNodeStatusNotEnabledOnSubscription = "NotEnabledOnSubscription"
ClusterCloudProviderNodeStatusNotAvailableInRegion = "NotAvailableInRegion"
)

View File

@ -1,8 +1,8 @@
package models
type ClusterEvent struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"`
Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"`
Details *EventDetails `json:"details,omitempty" url:"details,omitempty"`
}

View File

@ -1,27 +1,27 @@
package models
type ClusterEventType string
const (
ClusterEventTypeCreating = "CREATING"
ClusterEventTypeDidNotExpandDisk = "DID_NOT_EXPAND_DISK"
ClusterEventTypeExpandedDisk = "EXPANDED_DISK"
ClusterEventTypeFailedToExpandDisk = "FAILED_TO_EXPAND_DISK"
ClusterEventTypeInitScriptStarting = "INIT_SCRIPTS_STARTING"
ClusterEventTypeInitScriptFinished = "INIT_SCRIPTS_FINISHED"
ClusterEventTypeStarting = "STARTING"
ClusterEventTypeRestarting = "RESTARTING"
ClusterEventTypeTerminating = "TERMINATING"
ClusterEventTypeEdited = "EDITED"
ClusterEventTypeRunning = "RUNNING"
ClusterEventTypeResizing = "RESIZING"
ClusterEventTypeUpsizeCompleted = "UPSIZE_COMPLETED"
ClusterEventTypeNodesLost = "NODES_LOST"
ClusterEventTypeDriverHealthy = "DRIVER_HEALTHY"
ClusterEventTypeDriverUnavailable = "DRIVER_UNAVAILABLE"
ClusterEventTypeSparkException = "SPARK_EXCEPTION"
ClusterEventTypeDriverNotResponding = "DRIVER_NOT_RESPONDING"
ClusterEventTypeDbfsDown = "DBFS_DOWN"
ClusterEventTypeMetastoreDown = "METASTORE_DOWN"
ClusterEventTypeAutoscalingStatsReport = "AUTOSCALING_STATS_REPORT"
)

View File

@ -1,37 +1,37 @@
package models
type ClusterInfo struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
AutoScale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
Driver *SparkNode `json:"driver,omitempty" url:"driver,omitempty"`
Executors []SparkNode `json:"executors,omitempty" url:"executors,omitempty"`
SparkContextID int64 `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
JdbcPort int32 `json:"jdbc_port,omitempty" url:"jdbc_port,omitempty"`
ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"`
CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
AutoterminationMinutes int32 `json:"autotermination_minutes,omitempty" url:"autotermination_minutes,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
ClusterSource *ClusterSource `json:"cluster_source,omitempty" url:"cluster_source,omitempty"`
State *ClusterState `json:"state,omitempty" url:"state,omitempty"`
StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
TerminateTime int64 `json:"terminate_time,omitempty" url:"terminate_time,omitempty"`
LastStateLossTime int64 `json:"last_state_loss_time,omitempty" url:"last_state_loss_time,omitempty"`
LastActivityTime int64 `json:"last_activity_time,omitempty" url:"last_activity_time,omitempty"`
ClusterMemoryMb int64 `json:"cluster_memory_mb,omitempty" url:"cluster_memory_mb,omitempty"`
ClusterCores float32 `json:"cluster_cores,omitempty" url:"cluster_cores,omitempty"`
DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"`
ClusterLogStatus *LogSyncStatus `json:"cluster_log_status,omitempty" url:"cluster_log_status,omitempty"`
TerminationReason *S3StorageInfo `json:"termination_reason,omitempty" url:"termination_reason,omitempty"`
}

View File

@ -1,6 +1,6 @@
package models
type ClusterInstance struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
SparkContextID string `json:"spark_context_id,omitempty" url:"spark_context_id,omitempty"`
}

View File

@ -1,6 +1,6 @@
package models
type ClusterLibraryStatuses struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
LibraryStatuses []LibraryFullStatus `json:"library_statuses,omitempty" url:"library_statuses,omitempty"`
}

View File

@ -1,6 +1,6 @@
package models
type ClusterLogConf struct {
Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"`
S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"`
}

View File

@ -1,6 +1,6 @@
package models
type ClusterSize struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
}

View File

@ -1,9 +1,9 @@
package models
type ClusterSource string
const (
ClusterSourceUI = "UI"
ClusterSourceJob = "JOB"
ClusterSourceAPI = "API"
)

View File

@ -1,7 +1,7 @@
package models
type ClusterSpec struct {
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
}

package models

type ClusterState string

const (
	ClusterStatePending = "PENDING"
	ClusterStateRunning = "RUNNING"
	ClusterStateRestarting = "RESTARTING"
	ClusterStateResizing = "RESIZING"
	ClusterStateTerminating = "TERMINATING"
	ClusterStateError = "ERROR"
	ClusterStateUnknown = "UNKNOWN"
)
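Since the constants above are untyped strings, they compare directly against a ClusterState value. A small sketch of a helper that flags the transitional states (note the list defines TERMINATING but no TERMINATED value):

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

// inFlight reports whether the cluster is still moving between
// configurations, using only the states defined above.
func inFlight(s models.ClusterState) bool {
	switch s {
	case models.ClusterStatePending, models.ClusterStateRestarting,
		models.ClusterStateResizing, models.ClusterStateTerminating:
		return true
	}
	return false
}

func main() {
	fmt.Println(inFlight(models.ClusterStatePending)) // true
	fmt.Println(inFlight(models.ClusterStateRunning)) // false
}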

package models

type ClusterTag struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Value string `json:"value,omitempty" url:"value,omitempty"`
}

package models

type CronSchedule struct {
	QuartzCronExpression string `json:"quartz_cron_expression,omitempty" url:"quartz_cron_expression,omitempty"`
	TimezoneID string `json:"timezone_id,omitempty" url:"timezone_id,omitempty"`
}

package models

type DbfsStorageInfo struct {
	Destination string `json:"destination,omitempty" url:"destination,omitempty"`
}

package models

type DiskSpec struct {
	DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
	DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"`
	DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"`
}

package models

type DiskType struct {
	EbsVolumeType EbsVolumeType `json:"ebs_volume_type,omitempty" url:"ebs_volume_type,omitempty"`
}

package models

type EbsVolumeType string

const (
	EbsVolumeTypeGeneralPurposeSsd = "GENERAL_PURPOSE_SSD"
	EbsVolumeTypeThroughputOptimizedHdd = "THROUGHPUT_OPTIMIZED_HDD"
)

package models

type EventDetails struct {
	CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"`
	TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"`
	PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"`
	Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"`
	PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"`
	ClusterSize *ClusterSize `json:"cluster_size,omitempty" url:"cluster_size,omitempty"`
}

package models

type ExportFormat string

const (
	ExportFormatSource = "SOURCE"
	ExportFormatHtml = "HTML"
	ExportFormatJupyter = "JUPYTER"
	ExportFormatDbc = "DBC"
)

package models

type FileInfo struct {
	Path string `json:"path,omitempty" url:"path,omitempty"`
	IsDir bool `json:"is_dir,omitempty" url:"is_dir,omitempty"`
	FileSize int64 `json:"file_size,omitempty" url:"file_size,omitempty"`
}

package models

type InitScriptInfo struct {
	Dbfs *DbfsStorageInfo `json:"dbfs,omitempty" url:"dbfs,omitempty"`
	S3 *S3StorageInfo `json:"s3,omitempty" url:"s3,omitempty"`
}

package models

type InstancePoolAndStats struct {
	InstancePoolName string `json:"instance_pool_name,omitempty" url:"instance_pool_name,omitempty"`
	MinIdleInstances int32 `json:"min_idle_instances,omitempty" url:"min_idle_instances,omitempty"`
	MaxCapacity int32 `json:"max_capacity,omitempty" url:"max_capacity,omitempty"`
	AwsAttributes InstancePoolAwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"`
	NodetypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
	IdleInstanceAutoterminationMinutes int32 `json:"idle_instance_autotermination_minutes,omitempty" url:"idle_instance_autotermination_minutes,omitempty"`
	EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
	DiskSpec DiskSpec `json:"disk_spec,omitempty" url:"disk_spec,omitempty"`
	PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" url:"preloaded_spark_versions,omitempty"`
	InstancePoolID string `json:"instance_pool_id,omitempty" url:"instance_pool_id,omitempty"`
	DefaultTags []ClusterTag `json:"default_tags,omitempty" url:"default_tags,omitempty"`
	State InstancePoolState `json:"state,omitempty" url:"state,omitempty"`
	Stats InstancePoolStats `json:"stats,omitempty" url:"stats,omitempty"`
}

package models

type InstancePoolAwsAttributes struct {
	Availability AwsAvailability `json:"availability,omitempty" url:"availability,omitempty"`
	ZoneID string `json:"zone_id,omitempty" url:"zone_id,omitempty"`
	SpotBidPricePercent int32 `json:"spot_bid_price_percent,omitempty" url:"spot_bid_price_percent,omitempty"`
}

package models

type InstancePoolState string

const (
	InstancePoolStateActive = "ACTIVE"
	InstancePoolStateDeleted = "DELETED"
)

package models

type InstancePoolStats struct {
	UsedCount int32 `json:"used_count,omitempty" url:"used_count,omitempty"`
	IdleCount int32 `json:"idle_count,omitempty" url:"idle_count,omitempty"`
	PendingUsedCount int32 `json:"pending_used_count,omitempty" url:"pending_used_count,omitempty"`
	PendingIdleCount int32 `json:"pending_idle_count,omitempty" url:"pending_idle_count,omitempty"`
}

package models

type InstanceProfile struct {
	InstanceProfileArn string `json:"instance_profile_arn,omitempty" url:"instance_profile_arn,omitempty"`
}

package models

type Job struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
	CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
	Settings *JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
	CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"`
}

package models

type JobEmailNotifications struct {
	OnStart []string `json:"on_start,omitempty" url:"on_start,omitempty"`
	OnSuccess []string `json:"on_success,omitempty" url:"on_success,omitempty"`
	OnFailure []string `json:"on_failure,omitempty" url:"on_failure,omitempty"`
	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty" url:"no_alert_for_skipped_runs,omitempty"`
}

package models

type JobSettings struct {
	ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
	NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
	NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
	SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
	SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
	SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
	Name string `json:"name,omitempty" url:"name,omitempty"`
	Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
	EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"`
	TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
	MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"`
	MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
	Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"`
	MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
}
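JobSettings bundles one task variant (notebook, jar, python, or spark-submit), a cluster reference, and the schedule/retry/notification knobs. A sketch of a nightly notebook job on an existing cluster; the IDs, paths, and addresses are placeholders:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

func main() {
	settings := models.JobSettings{
		Name:              "nightly-report",
		ExistingClusterID: "0000-000000-example0", // placeholder cluster ID
		NotebookTask: &models.NotebookTask{
			NotebookPath:   "/Jobs/nightly-report",
			BaseParameters: map[string]string{"env": "prod"},
		},
		Schedule: &models.CronSchedule{
			QuartzCronExpression: "0 0 3 * * ?", // every day at 03:00
			TimezoneID:           "UTC",
		},
		EmailNotifications: &models.JobEmailNotifications{
			OnFailure: []string{"oncall@example.com"},
		},
		TimeoutSeconds: 3600,
		MaxRetries:     2,
		RetryOnTimeout: true,
	}
	fmt.Println(settings.Name)
}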

package models

type JobTask struct {
	NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
	SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
	SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
	SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
}

package models

type Language string

const (
	LanguageScala = "SCALA"
	LanguagePython = "PYTHON"
	LanguageSQL = "SQL"
	LanguageR = "R"
)

package models

type Library struct {
	Jar string `json:"jar,omitempty" url:"jar,omitempty"`
	Egg string `json:"egg,omitempty" url:"egg,omitempty"`
	Whl string `json:"whl,omitempty" url:"whl,omitempty"`
	Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"`
	Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"`
	Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"`
}
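Library is likewise a one-of: set exactly one of the six fields per entry. A sketch requesting one PyPI package and one Maven coordinate:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

func main() {
	libs := []models.Library{
		{Pypi: &models.PythonPyPiLibrary{Package: "requests==2.22.0"}},
		{Maven: &models.MavenLibrary{Coordinates: "org.jsoup:jsoup:1.7.2"}},
	}
	fmt.Println(len(libs)) // 2
}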

package models

type LibraryFullStatus struct {
	Library *Library `json:"library,omitempty" url:"library,omitempty"`
	Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"`
	Messages []string `json:"messages,omitempty" url:"messages,omitempty"`
	IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"`
}

package models

type LibraryInstallStatus string

const (
	LibraryInstallStatusPending = "PENDING"
	LibraryInstallStatusResolving = "RESOLVING"
	LibraryInstallStatusInstalling = "INSTALLING"
	LibraryInstallStatusInstalled = "INSTALLED"
	LibraryInstallStatusFailed = "FAILED"
	LibraryInstallStatusUninstallOnRestart = "UNINSTALL_ON_RESTART"
)

package models

type ListOrder string

const (
	ListOrderDesc = "DESC"
	ListOrderAsc = "ASC"
)

package models

type LogSyncStatus struct {
	LastAttempted int64 `json:"last_attempted,omitempty" url:"last_attempted,omitempty"`
	LastException string `json:"last_exception,omitempty" url:"last_exception,omitempty"`
}

package models

type MavenLibrary struct {
	Coordinates string `json:"coordinates,omitempty" url:"coordinates,omitempty"`
	Repo string `json:"repo,omitempty" url:"repo,omitempty"`
	Exclusions []string `json:"exclusions,omitempty" url:"exclusions,omitempty"`
}

package models

type NewCluster struct {
	NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
	Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
	ClusterName string `json:"cluster_name,omitempty" url:"cluster_name,omitempty"`
	SparkVersion string `json:"spark_version,omitempty" url:"spark_version,omitempty"`
	SparkConf *SparkConfPair `json:"spark_conf,omitempty" url:"spark_conf,omitempty"`
	AwsAttributes *AwsAttributes `json:"aws_attributes,omitempty" url:"aws_attributes,omitempty"`
	NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	DriverNodeTypeID string `json:"driver_node_type_id,omitempty" url:"driver_node_type_id,omitempty"`
	SSHPublicKeys []string `json:"ssh_public_keys,omitempty" url:"ssh_public_keys,omitempty"`
	CustomTags []ClusterTag `json:"custom_tags,omitempty" url:"custom_tags,omitempty"`
	ClusterLogConf *ClusterLogConf `json:"cluster_log_conf,omitempty" url:"cluster_log_conf,omitempty"`
	InitScripts []InitScriptInfo `json:"init_scripts,omitempty" url:"init_scripts,omitempty"`
	SparkEnvVars map[string]string `json:"spark_env_vars,omitempty" url:"spark_env_vars,omitempty"`
	EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" url:"enable_elastic_disk,omitempty"`
}
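NumWorkers and Autoscale are alternative ways to size the cluster; set one of the two. A fixed-size sketch in which the Spark version key and node type are illustrative placeholders:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

func main() {
	cluster := models.NewCluster{
		ClusterName:  "etl-fixed",
		SparkVersion: "6.4.x-scala2.11", // placeholder version key
		NodeTypeID:   "i3.xlarge",       // placeholder node type
		NumWorkers:   4,
		CustomTags:   []models.ClusterTag{{Key: "team", Value: "data"}},
		SparkEnvVars: map[string]string{"PYSPARK_PYTHON": "/databricks/python3/bin/python3"},
	}
	fmt.Println(cluster.ClusterName)
}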

package models

type NodeType struct {
	NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
	MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"`
	NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"`
	Description string `json:"description,omitempty" url:"description,omitempty"`
	InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"`
	IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"`
	NodeInfo *ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"`
}

package models

type NotebookOutput struct {
	Result string `json:"result,omitempty" url:"result,omitempty"`
	Truncated bool `json:"truncated,omitempty" url:"truncated,omitempty"`
}

package models

type NotebookTask struct {
	NotebookPath string `json:"notebook_path,omitempty" url:"notebook_path,omitempty"`
	BaseParameters map[string]string `json:"base_parameters,omitempty" url:"base_parameters,omitempty"`
}

package models

type ObjectInfo struct {
	ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"`
	Path string `json:"path,omitempty" url:"path,omitempty"`
	Language *Language `json:"language,omitempty" url:"language,omitempty"`
}

package models

type ObjectType string

const (
	ObjectTypeNotebook = "NOTEBOOK"
	ObjectTypeDirectory = "DIRECTORY"
	ObjectTypeLibrary = "LIBRARY"
)

package models

type ParamPair struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Value string `json:"value,omitempty" url:"value,omitempty"`
}

package models

type ParameterPair struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Value string `json:"value,omitempty" url:"value,omitempty"`
}

package models

type PrincipalName struct {
	UserName string `json:"user_name,omitempty" url:"user_name,omitempty"`
	GroupName string `json:"group_name,omitempty" url:"group_name,omitempty"`
}

package models

type PublicTokenInfo struct {
	TokenID string `json:"token_id,omitempty" url:"token_id,omitempty"`
	CreationTime int64 `json:"creation_time,omitempty" url:"creation_time,omitempty"`
	ExpiryTime int64 `json:"expiry_time,omitempty" url:"expiry_time,omitempty"`
	Comment string `json:"comment,omitempty" url:"comment,omitempty"`
}

package models

type PythonPyPiLibrary struct {
	Package string `json:"package,omitempty" url:"package,omitempty"`
	Repo string `json:"repo,omitempty" url:"repo,omitempty"`
}

package models

type RCranLibrary struct {
	Package string `json:"package,omitempty" url:"package,omitempty"`
	Repo string `json:"repo,omitempty" url:"repo,omitempty"`
}

package models

type ResizeCause string

const (
	ResizeCauseAutoscale = "AUTOSCALE"
	ResizeCauseUserRequest = "USER_REQUEST"
	ResizeCauseAutorecovery = "AUTORECOVERY"
)

package models

type Run struct {
	JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
	RunID int64 `json:"run_id,omitempty" url:"run_id,omitempty"`
	CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
	NumberInJob int64 `json:"number_in_job,omitempty" url:"number_in_job,omitempty"`
	OriginalAttemptRunID int64 `json:"original_attempt_run_id,omitempty" url:"original_attempt_run_id,omitempty"`
	State *RunState `json:"state,omitempty" url:"state,omitempty"`
	Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"`
	Task *JobTask `json:"task,omitempty" url:"task,omitempty"`
	ClusterSpec *ClusterSpec `json:"cluster_spec,omitempty" url:"cluster_spec,omitempty"`
	ClusterInstance *ClusterInstance `json:"cluster_instance,omitempty" url:"cluster_instance,omitempty"`
	OverridingParameters *RunParameters `json:"overriding_parameters,omitempty" url:"overriding_parameters,omitempty"`
	StartTime int64 `json:"start_time,omitempty" url:"start_time,omitempty"`
	SetupDuration int64 `json:"setup_duration,omitempty" url:"setup_duration,omitempty"`
	ExecutionDuration int64 `json:"execution_duration,omitempty" url:"execution_duration,omitempty"`
	CleanupDuration int64 `json:"cleanup_duration,omitempty" url:"cleanup_duration,omitempty"`
	Trigger *TriggerType `json:"trigger,omitempty" url:"trigger,omitempty"`
}

package models

type RunLifeCycleState string

const (
	RunLifeCycleStatePending = "PENDING"
	RunLifeCycleStateRunning = "RUNNING"
	RunLifeCycleStateTerminating = "TERMINATING"
	RunLifeCycleStateTerminated = "TERMINATED"
	RunLifeCycleStateSkipped = "SKIPPED"
	RunLifeCycleStateInternalError = "INTERNAL_ERROR"
)

package models

type RunParameters struct {
	JarParams []string `json:"jar_params,omitempty" url:"jar_params,omitempty"`
	NotebookParams map[string]string `json:"notebook_params,omitempty" url:"notebook_params,omitempty"`
	PythonParams []string `json:"python_params,omitempty" url:"python_params,omitempty"`
	SparkSubmitParams []string `json:"spark_submit_params,omitempty" url:"spark_submit_params,omitempty"`
}
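Only the field matching the target job's task type applies: notebook_params for notebook tasks, the list-valued params for jar, python, and spark-submit tasks. A sketch overriding notebook parameters for a single run:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

func main() {
	override := models.RunParameters{
		NotebookParams: map[string]string{"date": "2021-01-01", "dry_run": "false"},
	}
	fmt.Println(override.NotebookParams["date"])
}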

package models

type RunResultState string

const (
	RunResultStateSuccess = "SUCCESS"
	RunResultStateFailed = "FAILED"
	RunResultStateTimedout = "TIMEDOUT"
	RunResultStateCanceled = "CANCELED"
)

package models

type RunState struct {
	LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"`
	ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"`
	StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
}
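ResultState only carries meaning once LifeCycleState reaches TERMINATED, and both fields are pointers, so nil checks come first. A sketch of a success test built from the two enums above:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

// succeeded reports whether a run reached TERMINATED with result SUCCESS.
func succeeded(s models.RunState) bool {
	if s.LifeCycleState == nil || *s.LifeCycleState != models.RunLifeCycleStateTerminated {
		return false
	}
	return s.ResultState != nil && *s.ResultState == models.RunResultStateSuccess
}

func main() {
	lc := models.RunLifeCycleState(models.RunLifeCycleStateTerminated)
	rs := models.RunResultState(models.RunResultStateSuccess)
	fmt.Println(succeeded(models.RunState{LifeCycleState: &lc, ResultState: &rs})) // true
}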

package models

type S3StorageInfo struct {
	Destination string `json:"destination,omitempty" url:"destination,omitempty"`
	Region string `json:"region,omitempty" url:"region,omitempty"`
	Endpoint string `json:"endpoint,omitempty" url:"endpoint,omitempty"`
	EnableEncryption bool `json:"enable_encryption,omitempty" url:"enable_encryption,omitempty"`
	EncryptionType string `json:"encryption_type,omitempty" url:"encryption_type,omitempty"`
	KmsKey string `json:"kms_key,omitempty" url:"kms_key,omitempty"`
	CannedACL string `json:"canned_acl,omitempty" url:"canned_acl,omitempty"`
}
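A sketch of an encrypted S3 log target; the bucket name, and the expectation that region (or endpoint) is set so the bucket resolves, are assumptions from typical Databricks usage rather than anything this struct enforces:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

func main() {
	s3 := models.S3StorageInfo{
		Destination:      "s3://example-bucket/cluster-logs", // placeholder bucket
		Region:           "us-west-2",
		EnableEncryption: true,
		EncryptionType:   "sse-s3",
	}
	fmt.Println(s3.Destination)
}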

package models

type ScopeBackendType string

const (
	ScopeBackendTypeDatabricks = "DATABRICKS"
)

package models

type SecretMetadata struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty" url:"last_updated_timestamp,omitempty"`
}

package models

type SecretScope struct {
	Name string `json:"name,omitempty" url:"name,omitempty"`
	BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"`
}

package models

type SparkConfPair struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Value string `json:"value,omitempty" url:"value,omitempty"`
}

package models

type SparkEnvPair struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Value string `json:"value,omitempty" url:"value,omitempty"`
}

package models

type SparkJarTask struct {
	JarURI string `json:"jar_uri,omitempty" url:"jar_uri,omitempty"`
	MainClassName string `json:"main_class_name,omitempty" url:"main_class_name,omitempty"`
	Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"`
}

package models

type SparkNode struct {
	PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"`
	PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"`
	NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"`
	InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"`
	StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"`
	NodeAwsAttributes *SparkNodeAwsAttributes `json:"node_aws_attributes,omitempty" url:"node_aws_attributes,omitempty"`
	HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"`
}

package models

type SparkNodeAwsAttributes struct {
	IsSpot bool `json:"is_spot,omitempty" url:"is_spot,omitempty"`
}

package models

type SparkPythonTask struct {
	PythonFile string `json:"python_file,omitempty" url:"python_file,omitempty"`
	Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"`
}

package models

type SparkSubmitTask struct {
	Parameters []string `json:"parameters,omitempty" url:"parameters,omitempty"`
}

package models

type SparkVersion struct {
	Key string `json:"key,omitempty" url:"key,omitempty"`
	Name string `json:"name,omitempty" url:"name,omitempty"`
}

package models

type TerminationCode string

const (
	TerminationCodeUserRequest = "USER_REQUEST"
	TerminationCodeJobFinished = "JOB_FINISHED"
	TerminationCodeInactivity = "INACTIVITY"
	TerminationCodeCloudProviderShutdown = "CLOUD_PROVIDER_SHUTDOWN"
	TerminationCodeCommunicationLost = "COMMUNICATION_LOST"
	TerminationCodeCloudProviderLaunchFailure = "CLOUD_PROVIDER_LAUNCH_FAILURE"
	TerminationCodeSparkStartupFailure = "SPARK_STARTUP_FAILURE"
	TerminationCodeInvalidArgument = "INVALID_ARGUMENT"
	TerminationCodeUnexpectedLaunchFailure = "UNEXPECTED_LAUNCH_FAILURE"
	TerminationCodeInternalError = "INTERNAL_ERROR"
	TerminationCodeInstanceUnreachable = "INSTANCE_UNREACHABLE"
	TerminationCodeRequestRejected = "REQUEST_REJECTED"
	TerminationCodeInitScriptFailure = "INIT_SCRIPT_FAILURE"
	TerminationCodeTrialExpired = "TRIAL_EXPIRED"
)

package models

type TerminationParameter string

const (
	TerminationParameterUsername = "username"
	TerminationParameterAwsAPIErrorCode = "aws_api_error_code"
	TerminationParameterAwsInstanceStateReason = "aws_instance_state_reason"
	TerminationParameterAwsSpotRequestStatus = "aws_spot_request_status"
	TerminationParameterAwsSpotRequestFaultCode = "aws_spot_request_fault_code"
	TerminationParameterAwsImpairedStatusDetails = "aws_impaired_status_details"
	TerminationParameterAwsInstanceStatusEvent = "aws_instance_status_event"
	TerminationParameterAwsErrorMessage = "aws_error_message"
	TerminationParameterDatabricksErrorMessage = "databricks_error_message"
	TerminationParameterInactivityDurationMin = "inactivity_duration_min"
	TerminationParameterInstanceID = "instance_id"
)

package models

type TerminationReason struct {
	Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"`
	Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"`
}
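Parameters carries code-specific details keyed by the TerminationParameter values above, so a readable message can be assembled from both fields. A sketch:

package main

import (
	"fmt"

	"github.com/xinsnake/databricks-sdk-golang/aws/models" // assumed import path
)

// describe flattens a TerminationReason into a single log line.
func describe(r models.TerminationReason) string {
	msg := "terminated"
	if r.Code != nil {
		msg += ": " + string(*r.Code)
	}
	for _, p := range r.Parameters {
		msg += fmt.Sprintf(" %s=%q", p.Key, p.Value)
	}
	return msg
}

func main() {
	code := models.TerminationCode(models.TerminationCodeInactivity)
	fmt.Println(describe(models.TerminationReason{
		Code:       &code,
		Parameters: []models.ParameterPair{{Key: models.TerminationParameterInactivityDurationMin, Value: "120"}},
	}))
}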

package models

type TriggerType string

const (
	TriggerTypePeriodic = "PERIODIC"
	TriggerTypeOneTime = "ONE_TIME"
	TriggerTypeRetry = "RETRY"
)

package models

type ViewItem struct {
	Content string `json:"content,omitempty" url:"content,omitempty"`
	Name string `json:"name,omitempty" url:"name,omitempty"`
	Type *ViewType `json:"type,omitempty" url:"type,omitempty"`
}

package models

type ViewType string

const (
	ViewTypeNotebook = "NOTEBOOK"
	ViewTypeDashboard = "DASHBOARD"
)

package models

type ViewsToExport string

const (
	ViewsToExportCode = "CODE"
	ViewsToExportDashboards = "DASHBOARDS"
	ViewsToExportAll = "ALL"
)

// +k8s:deepcopy-gen=package
package models

package aws

// ScimAPI exposes the SCIM API
type ScimAPI struct {
	Client DBClient
}

func (a ScimAPI) init(client DBClient) ScimAPI {
	a.Client = client
	return a
}
