Merge pull request #22 from EliiseS/es/add-building-linting-to-make-file

Add building/linting/testing to make file and fix issues
This commit is contained in:
azadeh khojandi 2020-03-14 17:23:11 +11:00 committed by GitHub
commit 9a14864c91
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
29 changed files with 151 additions and 122 deletions

View File

@@ -19,36 +19,26 @@ RUN apt-get update \
&& go get -x -d github.com/stamblerre/gocode 2>&1 \
&& go build -o gocode-gomod github.com/stamblerre/gocode \
&& mv gocode-gomod $GOPATH/bin/ \
# Install Go tools
&& go get -u -v \
github.com/mdempsky/gocode \
github.com/uudashr/gopkgs/cmd/gopkgs \
github.com/ramya-rao-a/go-outline \
github.com/acroca/go-symbols \
github.com/godoctor/godoctor \
golang.org/x/tools/cmd/guru \
golang.org/x/tools/cmd/gorename \
github.com/rogpeppe/godef \
github.com/zmb3/gogetdoc \
github.com/haya14busa/goplay/cmd/goplay \
github.com/sqs/goreturns \
github.com/josharian/impl \
github.com/davidrjenni/reftools/cmd/fillstruct \
github.com/fatih/gomodifytags \
github.com/cweill/gotests/... \
golang.org/x/tools/cmd/goimports \
golang.org/x/lint/golint \
golang.org/x/tools/cmd/gopls \
github.com/alecthomas/gometalinter \
honnef.co/go/tools/... \
github.com/golangci/golangci-lint/cmd/golangci-lint \
github.com/mgechev/revive \
github.com/derekparker/delve/cmd/dlv 2>&1 \
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*
# Enable go modules
ENV GO111MODULE=on
# Install Go tools
RUN \
# --> Go language server
go get golang.org/x/tools/gopls@v0.3.3 \
# --> GolangCI-lint
&& curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sed 's/tar -/tar --no-same-owner -/g' | sh -s -- -b $(go env GOPATH)/bin \
# --> Delve for debugging
&& go get github.com/go-delve/delve/cmd/dlv@v1.4.0 \
# --> Go-outline for extracting a JSON representation of the declarations in a Go source file
&& go get -v github.com/ramya-rao-a/go-outline \
&& rm -rf /go/src/ && rm -rf /go/pkg
RUN apt-get update \
# Install Docker CE CLI
&& apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common lsb-release \
@@ -61,12 +51,7 @@ RUN apt-get update \
RUN apt-get -y install git procps wget nano zsh inotify-tools jq
RUN wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | zsh || true
ENV GO111MODULE=on
COPY ./Makefile ./
RUN mkdir -p /go/src/github.com/xinsnake/databricks-sdk-golang
ENV SHELL /bin/bash
ENV SHELL /bin/bash

View File

@@ -2,10 +2,10 @@
{
"name": "Go",
"dockerComposeFile": "docker-compose.yml",
"service": "docker-in-docker",
"service": "docker-in-docker",
"workspaceFolder": "/go/src/github.com/xinsnake/databricks-sdk-golang",
"postCreateCommand": "",
"shutdownAction": "stopCompose",
"shutdownAction": "stopCompose",
"extensions": [
"ms-azuretools.vscode-docker",
"ms-vscode.go"
@@ -13,13 +13,36 @@
"settings": {
"terminal.integrated.shell.linux": "zsh",
"go.gopath": "/go",
"go.inferGopath": true,
"go.useLanguageServer": true,
"[go]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
// Optional: Disable snippets, as they conflict with completion ranking.
"editor.snippetSuggestions": "none",
},
"[go.mod]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true,
},
},
"gopls": {
"usePlaceholders": true, // add parameter placeholders when completing a function
// Experimental settings
"completeUnimported": true, // autocomplete unimported packages
"deepCompletion": true // enable deep completion
},
"go.toolsEnvVars": {
"GO111MODULE": "on"
},
"go.lintTool": "golangci-lint",
"go.lintFlags": [
"--fast"
],
"remote.extensionKind": {
"ms-azuretools.vscode-docker": "workspace"
}
}
}
}

View File

@@ -1,2 +1,14 @@
all : checks test
checks:
go build all
golangci-lint run
test: checks
go test ./...
fmt:
find . -name '*.go' | grep -v vendor | xargs gofmt -s -w
deepcopy:
./cmd/deepcopy-gen -i ./,./aws/...,./azure/... -h ./hack/boilerplate.go.txt -v 3

View File

@@ -109,6 +109,10 @@ func (a ClustersAPI) Get(clusterID string) (models.ClusterInfo, error) {
}
resp, err := a.Client.performQuery(http.MethodGet, "/clusters/get-delete", data, nil)
if err != nil {
return clusterInfo, err
}
err = json.Unmarshal(resp, &clusterInfo)
return clusterInfo, err
}

View File

@@ -60,6 +60,10 @@ func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error)
}
resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil)
if err != nil {
return createResponse, err
}
err = json.Unmarshal(resp, &createResponse)
return createResponse, err
}
@@ -88,7 +92,12 @@ func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) {
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil)
if err != nil {
return fileInfo, err
}
err = json.Unmarshal(resp, &fileInfo)
return fileInfo, err
}
@@ -108,6 +117,10 @@ func (a DbfsAPI) List(path string) ([]models.FileInfo, error) {
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil)
if err != nil {
return listResponse.Files, err
}
err = json.Unmarshal(resp, &listResponse)
return listResponse.Files, err
}
@@ -176,6 +189,10 @@ func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, erro
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil)
if err != nil {
return readResponse, err
}
err = json.Unmarshal(resp, &readResponseBase64)
if err != nil {
return readResponse, err

View File

@@ -195,23 +195,6 @@ func (in *GroupsCreateResponse) DeepCopy() *GroupsCreateResponse {
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *InstancePoolsAPI) DeepCopyInto(out *InstancePoolsAPI) {
*out = *in
in.Client.DeepCopyInto(&out.Client)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstancePoolsAPI.
func (in *InstancePoolsAPI) DeepCopy() *InstancePoolsAPI {
if in == nil {
return nil
}
out := new(InstancePoolsAPI)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *InstanceProfilesAPI) DeepCopyInto(out *InstanceProfilesAPI) {
*out = *in

View File

@@ -1,11 +0,0 @@
package aws
// InstancePoolsAPI exposes the InstancePools API
type InstancePoolsAPI struct {
Client DBClient
}
func (a InstancePoolsAPI) init(client DBClient) InstancePoolsAPI {
a.Client = client
return a
}

View File

@@ -1,8 +1,8 @@
package models
type ClusterEvent struct {
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"`
ClusterID string `json:"cluster_id,omitempty" url:"cluster_id,omitempty"`
Timestamp int64 `json:"timestamp,omitempty" url:"timestamp,omitempty"`
Type *ClusterEventType `json:"type,omitempty" url:"type,omitempty"`
Details *AwsAttributes `json:"details,omitempty" url:"details,omitempty"`
}

View File

@@ -1,6 +1,6 @@
package models
type ClusterSize struct {
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
NumWorkers int32 `json:"num_workers,omitempty" url:"num_workers,omitempty"`
Autoscale *AutoScale `json:"autoscale,omitempty" url:"autoscale,omitempty"`
}

View File

@@ -1,7 +1,7 @@
package models
type ClusterSpec struct {
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
}

View File

@@ -1,7 +1,7 @@
package models
type DiskSpec struct {
DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"`
DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"`
}
}

View File

@@ -1,8 +1,8 @@
package models
type EventDetails struct {
CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"`
TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"`
CurrentNumWorkers int32 `json:"current_num_workers,omitempty" url:"current_num_workers,omitempty"`
TargetNumWorkers int32 `json:"target_num_workers,omitempty" url:"target_num_workers,omitempty"`
PreviousAttributes *ClusterAttributes `json:"previous_attributes,omitempty" url:"previous_attributes,omitempty"`
Attributes *ClusterAttributes `json:"attributes,omitempty" url:"attributes,omitempty"`
PreviousClusterSize *ClusterSize `json:"previous_cluster_size,omitempty" url:"previous_cluster_size,omitempty"`

View File

@@ -1,8 +1,8 @@
package models
type Job struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
Settings *JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"`
CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"`
}

View File

@@ -1,19 +1,19 @@
package models
type JobSettings struct {
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"`
TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"`
MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"`
MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"`
MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
}

View File

@@ -1,9 +1,9 @@
package models
type Library struct {
Jar string `json:"jar,omitempty" url:"jar,omitempty"`
Egg string `json:"egg,omitempty" url:"egg,omitempty"`
Whl string `json:"whl,omitempty" url:"whl,omitempty"`
Jar string `json:"jar,omitempty" url:"jar,omitempty"`
Egg string `json:"egg,omitempty" url:"egg,omitempty"`
Whl string `json:"whl,omitempty" url:"whl,omitempty"`
Pypi *PythonPyPiLibrary `json:"pypi,omitempty" url:"pypi,omitempty"`
Maven *MavenLibrary `json:"maven,omitempty" url:"maven,omitempty"`
Cran *RCranLibrary `json:"cran,omitempty" url:"cran,omitempty"`

View File

@@ -3,6 +3,6 @@ package models
type LibraryFullStatus struct {
Library *Library `json:"library,omitempty" url:"library,omitempty"`
Status *LibraryInstallStatus `json:"status,omitempty" url:"status,omitempty"`
Messages []string `json:"messages,omitempty" url:"messages,omitempty"`
IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"`
Messages []string `json:"messages,omitempty" url:"messages,omitempty"`
IsLibraryForAllClusters bool `json:"is_library_for_all_clusters,omitempty" url:"is_library_for_all_clusters,omitempty"`
}

View File

@@ -1,11 +1,11 @@
package models
type NodeType struct {
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"`
NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"`
Description string `json:"description,omitempty" url:"description,omitempty"`
InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"`
IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"`
NodeTypeID string `json:"node_type_id,omitempty" url:"node_type_id,omitempty"`
MemoryMb int32 `json:"memory_mb,omitempty" url:"memory_mb,omitempty"`
NumCores float32 `json:"num_cores,omitempty" url:"num_cores,omitempty"`
Description string `json:"description,omitempty" url:"description,omitempty"`
InstanceTypeID string `json:"instance_type_id,omitempty" url:"instance_type_id,omitempty"`
IsDeprecated bool `json:"is_deprecated,omitempty" url:"is_deprecated,omitempty"`
NodeInfo *ClusterCloudProviderNodeInfo `json:"node_info,omitempty" url:"node_info,omitempty"`
}

View File

@@ -2,6 +2,6 @@ package models
type ObjectInfo struct {
ObjectType *ObjectType `json:"object_type,omitempty" url:"object_type,omitempty"`
Path string `json:"path,omitempty" url:"path,omitempty"`
Path string `json:"path,omitempty" url:"path,omitempty"`
Language *Language `json:"language,omitempty" url:"language,omitempty"`
}

View File

@@ -3,5 +3,5 @@ package models
type RunState struct {
LifeCycleState *RunLifeCycleState `json:"life_cycle_state,omitempty" url:"life_cycle_state,omitempty"`
ResultState *RunResultState `json:"result_state,omitempty" url:"result_state,omitempty"`
StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
StateMessage string `json:"state_message,omitempty" url:"state_message,omitempty"`
}

View File

@@ -1,6 +1,6 @@
package models
type SecretScope struct {
Name string `json:"name,omitempty" url:"name,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
BackendType *ScopeBackendType `json:"backend_type,omitempty" url:"backend_type,omitempty"`
}

View File

@@ -1,11 +1,11 @@
package models
type SparkNode struct {
PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"`
PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"`
NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"`
InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"`
StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"`
PrivateIP string `json:"private_ip,omitempty" url:"private_ip,omitempty"`
PublicDNS string `json:"public_dns,omitempty" url:"public_dns,omitempty"`
NodeID string `json:"node_id,omitempty" url:"node_id,omitempty"`
InstanceID string `json:"instance_id,omitempty" url:"instance_id,omitempty"`
StartTimestamp int64 `json:"start_timestamp,omitempty" url:"start_timestamp,omitempty"`
NodeAwsAttributes *SparkNodeAwsAttributes `json:"node_aws_attributes,omitempty" url:"node_aws_attributes,omitempty"`
HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"`
HostPrivateIP string `json:"host_private_ip,omitempty" url:"host_private_ip,omitempty"`
}

View File

@@ -2,5 +2,5 @@ package models
type TerminationReason struct {
Code *TerminationCode `json:"code,omitempty" url:"code,omitempty"`
Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"`
Parameters []ParameterPair `json:"parameters,omitempty" url:"parameters,omitempty"`
}

View File

@@ -1,7 +1,7 @@
package models
type ViewItem struct {
Content string `json:"content,omitempty" url:"content,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Content string `json:"content,omitempty" url:"content,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Type *ViewType `json:"type,omitempty" url:"type,omitempty"`
}

View File

@@ -60,6 +60,10 @@ func (a DbfsAPI) Create(path string, overwrite bool) (DbfsCreateResponse, error)
}
resp, err := a.Client.performQuery(http.MethodPost, "/dbfs/create", data, nil)
if err != nil {
return createResponse, err
}
err = json.Unmarshal(resp, &createResponse)
return createResponse, err
}
@@ -88,6 +92,10 @@ func (a DbfsAPI) GetStatus(path string) (models.FileInfo, error) {
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/get-status", data, nil)
if err != nil {
return fileInfo, err
}
err = json.Unmarshal(resp, &fileInfo)
return fileInfo, err
}
@@ -108,6 +116,10 @@ func (a DbfsAPI) List(path string) ([]models.FileInfo, error) {
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/list", data, nil)
if err != nil {
return listResponse.Files, err
}
err = json.Unmarshal(resp, &listResponse)
return listResponse.Files, err
}
@@ -176,6 +188,10 @@ func (a DbfsAPI) Read(path string, offset, length int64) (DbfsReadResponse, erro
}
resp, err := a.Client.performQuery(http.MethodGet, "/dbfs/read", data, nil)
if err != nil {
return readResponse, err
}
err = json.Unmarshal(resp, &readResponseBase64)
if err != nil {
return readResponse, err

View File

@@ -33,11 +33,11 @@ func (a JobsAPI) Create(jobSettings models.JobSettings) (models.Job, error) {
// JobsListResponse is the response type returned by JobsList
type JobsListResponse = struct {
Jobs []models.Job `json:"jobs,omitempty" url:"jobs,omitempty"`
}{}
}
// List lists all jobs
func (a JobsAPI) List() ([]models.Job, error) {
var jobsList = JobsListResponse
var jobsList JobsListResponse
resp, err := a.Client.performQuery(http.MethodGet, "/jobs/list", nil, nil)
if err != nil {

View File

@@ -1,7 +1,7 @@
package models
type DiskSpec struct {
DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
DiskType *DiskType `json:"disk_type,omitempty" url:"disk_type,omitempty"`
DiskCount int32 `json:"disk_count,omitempty" url:"disk_count,omitempty"`
DiskSize int32 `json:"disk_size,omitempty" url:"disk_size,omitempty"`
}
}

View File

@@ -1,8 +1,8 @@
package models
type Job struct {
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty" url:"creator_user_name,omitempty"`
Settings *JobSettings `json:"settings,omitempty" url:"settings,omitempty"`
CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"`
CreatedTime int64 `json:"created_time,omitempty" url:"created_time,omitempty"`
}

View File

@@ -1,19 +1,19 @@
package models
type JobSettings struct {
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
ExistingClusterID string `json:"existing_cluster_id,omitempty" url:"existing_cluster_id,omitempty"`
NewCluster *NewCluster `json:"new_cluster,omitempty" url:"new_cluster,omitempty"`
NotebookTask *NotebookTask `json:"notebook_task,omitempty" url:"notebook_task,omitempty"`
SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" url:"spark_jar_task,omitempty"`
SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" url:"spark_python_task,omitempty"`
SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" url:"spark_submit_task,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
Name string `json:"name,omitempty" url:"name,omitempty"`
Libraries []Library `json:"libraries,omitempty" url:"libraries,omitempty"`
EmailNotifications *JobEmailNotifications `json:"email_notifications,omitempty" url:"email_notifications,omitempty"`
TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"`
MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
TimeoutSeconds int32 `json:"timeout_seconds,omitempty" url:"timeout_seconds,omitempty"`
MaxRetries int32 `json:"max_retries,omitempty" url:"max_retries,omitempty"`
MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty" url:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty" url:"retry_on_timeout,omitempty"`
Schedule *CronSchedule `json:"schedule,omitempty" url:"schedule,omitempty"`
MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty" url:"max_concurrent_runs,omitempty"`
}

View File

@@ -20,7 +20,7 @@ func PerformQuery(option DBClientOption, method, path string, data interface{},
requestHeaders := option.getDefaultHeaders()
if headers != nil && len(headers) > 0 {
if len(headers) > 0 {
for k, v := range headers {
requestHeaders[k] = v
}