Databricks v1.65.0 published on Wednesday, Apr 9, 2025 by Pulumi

databricks.SqlGlobalConfig

This resource configures the security policy, databricks_instance_profile, and data access properties for all databricks.SqlEndpoint resources in a workspace. Please note that changing parameters of this resource will restart all running databricks_sql_endpoint resources. To use this resource you need to be an administrator.

Example Usage

AWS example

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    instanceProfileArn: "arn:....",
    dataAccessConfig: {
        "spark.sql.session.timeZone": "UTC",
    },
});
import pulumi
import pulumi_databricks as databricks

this = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    instance_profile_arn="arn:....",
    data_access_config={
        "spark.sql.session.timeZone": "UTC",
    })
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
			SecurityPolicy:     pulumi.String("DATA_ACCESS_CONTROL"),
			InstanceProfileArn: pulumi.String("arn:...."),
			DataAccessConfig: pulumi.StringMap{
				"spark.sql.session.timeZone": pulumi.String("UTC"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var @this = new Databricks.SqlGlobalConfig("this", new()
    {
        SecurityPolicy = "DATA_ACCESS_CONTROL",
        InstanceProfileArn = "arn:....",
        DataAccessConfig = 
        {
            { "spark.sql.session.timeZone", "UTC" },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()
            .securityPolicy("DATA_ACCESS_CONTROL")
            .instanceProfileArn("arn:....")
            .dataAccessConfig(Map.of("spark.sql.session.timeZone", "UTC"))
            .build());

    }
}
resources:
  this:
    type: databricks:SqlGlobalConfig
    properties:
      securityPolicy: DATA_ACCESS_CONTROL
      instanceProfileArn: arn:....
      dataAccessConfig:
        spark.sql.session.timeZone: UTC

Azure example

For Azure, use data_access_config to provide the service principal configuration. You can use the Databricks SQL Admin Console UI to help you generate the right configuration values. The examples below assume that applicationId, secretScope, secretKey, and tenantId are defined elsewhere in your program, for example from stack configuration.

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    dataAccessConfig: {
        "spark.hadoop.fs.azure.account.auth.type": "OAuth",
        "spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
        "spark.hadoop.fs.azure.account.oauth2.client.id": applicationId,
        "spark.hadoop.fs.azure.account.oauth2.client.secret": `{{secrets/${secretScope}/${secretKey}}}`,
        "spark.hadoop.fs.azure.account.oauth2.client.endpoint": `https://login.microsoftonline.com/${tenantId}/oauth2/token`,
    },
    sqlConfigParams: {
        ANSI_MODE: "true",
    },
});
import pulumi
import pulumi_databricks as databricks

this = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    data_access_config={
        "spark.hadoop.fs.azure.account.auth.type": "OAuth",
        "spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
        "spark.hadoop.fs.azure.account.oauth2.client.id": application_id,
        "spark.hadoop.fs.azure.account.oauth2.client.secret": f"{{{{secrets/{secret_scope}/{secret_key}}}}}",
        "spark.hadoop.fs.azure.account.oauth2.client.endpoint": f"https://login.microsoftonline.com/{tenant_id}/oauth2/token",
    },
    sql_config_params={
        "ANSI_MODE": "true",
    })
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
			SecurityPolicy: pulumi.String("DATA_ACCESS_CONTROL"),
			DataAccessConfig: pulumi.StringMap{
				"spark.hadoop.fs.azure.account.auth.type":              pulumi.String("OAuth"),
				"spark.hadoop.fs.azure.account.oauth.provider.type":    pulumi.String("org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
				"spark.hadoop.fs.azure.account.oauth2.client.id":       pulumi.Any(applicationId),
				"spark.hadoop.fs.azure.account.oauth2.client.secret":   pulumi.Sprintf("{{secrets/%v/%v}}", secretScope, secretKey),
				"spark.hadoop.fs.azure.account.oauth2.client.endpoint": pulumi.Sprintf("https://login.microsoftonline.com/%v/oauth2/token", tenantId),
			},
			SqlConfigParams: pulumi.StringMap{
				"ANSI_MODE": pulumi.String("true"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var @this = new Databricks.SqlGlobalConfig("this", new()
    {
        SecurityPolicy = "DATA_ACCESS_CONTROL",
        DataAccessConfig = 
        {
            { "spark.hadoop.fs.azure.account.auth.type", "OAuth" },
            { "spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider" },
            { "spark.hadoop.fs.azure.account.oauth2.client.id", applicationId },
            { "spark.hadoop.fs.azure.account.oauth2.client.secret", $"{{{{secrets/{secretScope}/{secretKey}}}}}" },
            { "spark.hadoop.fs.azure.account.oauth2.client.endpoint", $"https://login.microsoftonline.com/{tenantId}/oauth2/token" },
        },
        SqlConfigParams = 
        {
            { "ANSI_MODE", "true" },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()
            .securityPolicy("DATA_ACCESS_CONTROL")
            .dataAccessConfig(Map.ofEntries(
                Map.entry("spark.hadoop.fs.azure.account.auth.type", "OAuth"),
                Map.entry("spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.id", applicationId),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.secret", String.format("{{{{secrets/%s/%s}}}}", secretScope,secretKey)),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.endpoint", String.format("https://login.microsoftonline.com/%s/oauth2/token", tenantId))
            ))
            .sqlConfigParams(Map.of("ANSI_MODE", "true"))
            .build());

    }
}
resources:
  this:
    type: databricks:SqlGlobalConfig
    properties:
      securityPolicy: DATA_ACCESS_CONTROL
      dataAccessConfig:
        spark.hadoop.fs.azure.account.auth.type: OAuth
        spark.hadoop.fs.azure.account.oauth.provider.type: org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
        spark.hadoop.fs.azure.account.oauth2.client.id: ${applicationId}
        spark.hadoop.fs.azure.account.oauth2.client.secret: '{{secrets/${secretScope}/${secretKey}}}'
        spark.hadoop.fs.azure.account.oauth2.client.endpoint: https://login.microsoftonline.com/${tenantId}/oauth2/token
      sqlConfigParams:
        ANSI_MODE: 'true'
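
GCP example

For GCP workspaces the resource also accepts a google_service_account input (documented under Inputs below). A minimal sketch, assuming a hypothetical service account email that has access to your storage:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Hypothetical service account email; replace with one granted access to your data.
const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    googleServiceAccount: "sql-endpoints@my-project.iam.gserviceaccount.com",
});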

The following resources are often used in the same context:

  • End to end workspace management guide.
  • databricks.InstanceProfile to manage AWS EC2 instance profiles that users can use to launch databricks.Cluster resources and access data, like databricks_mount.
  • databricks.SqlDashboard to manage Databricks SQL Dashboards.
  • databricks.SqlEndpoint to manage Databricks SQL Endpoints.
  • databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.

Create SqlGlobalConfig Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new SqlGlobalConfig(name: string, args?: SqlGlobalConfigArgs, opts?: CustomResourceOptions);
@overload
def SqlGlobalConfig(resource_name: str,
                    args: Optional[SqlGlobalConfigArgs] = None,
                    opts: Optional[ResourceOptions] = None)

@overload
def SqlGlobalConfig(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    data_access_config: Optional[Mapping[str, str]] = None,
                    enable_serverless_compute: Optional[bool] = None,
                    google_service_account: Optional[str] = None,
                    instance_profile_arn: Optional[str] = None,
                    security_policy: Optional[str] = None,
                    sql_config_params: Optional[Mapping[str, str]] = None)
func NewSqlGlobalConfig(ctx *Context, name string, args *SqlGlobalConfigArgs, opts ...ResourceOption) (*SqlGlobalConfig, error)
public SqlGlobalConfig(string name, SqlGlobalConfigArgs? args = null, CustomResourceOptions? opts = null)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args, CustomResourceOptions options)
type: databricks:SqlGlobalConfig
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. SqlGlobalConfigArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var sqlGlobalConfigResource = new Databricks.SqlGlobalConfig("sqlGlobalConfigResource", new()
{
    DataAccessConfig = 
    {
        { "string", "string" },
    },
    GoogleServiceAccount = "string",
    InstanceProfileArn = "string",
    SecurityPolicy = "string",
    SqlConfigParams = 
    {
        { "string", "string" },
    },
});
example, err := databricks.NewSqlGlobalConfig(ctx, "sqlGlobalConfigResource", &databricks.SqlGlobalConfigArgs{
	DataAccessConfig: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	GoogleServiceAccount: pulumi.String("string"),
	InstanceProfileArn:   pulumi.String("string"),
	SecurityPolicy:       pulumi.String("string"),
	SqlConfigParams: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
})
var sqlGlobalConfigResource = new SqlGlobalConfig("sqlGlobalConfigResource", SqlGlobalConfigArgs.builder()
    .dataAccessConfig(Map.of("string", "string"))
    .googleServiceAccount("string")
    .instanceProfileArn("string")
    .securityPolicy("string")
    .sqlConfigParams(Map.of("string", "string"))
    .build());
sql_global_config_resource = databricks.SqlGlobalConfig("sqlGlobalConfigResource",
    data_access_config={
        "string": "string",
    },
    google_service_account="string",
    instance_profile_arn="string",
    security_policy="string",
    sql_config_params={
        "string": "string",
    })
const sqlGlobalConfigResource = new databricks.SqlGlobalConfig("sqlGlobalConfigResource", {
    dataAccessConfig: {
        string: "string",
    },
    googleServiceAccount: "string",
    instanceProfileArn: "string",
    securityPolicy: "string",
    sqlConfigParams: {
        string: "string",
    },
});
type: databricks:SqlGlobalConfig
properties:
    dataAccessConfig:
        string: string
    googleServiceAccount: string
    instanceProfileArn: string
    securityPolicy: string
    sqlConfigParams:
        string: string

SqlGlobalConfig Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The SqlGlobalConfig resource accepts the following input properties:

DataAccessConfig Dictionary<string, string>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
EnableServerlessCompute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

GoogleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
InstanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
SecurityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
SqlConfigParams Dictionary<string, string>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
DataAccessConfig map[string]string
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
EnableServerlessCompute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

GoogleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
InstanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
SecurityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
SqlConfigParams map[string]string
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig Map<String,String>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute Boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount String
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn String
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy String
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams Map<String,String>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig {[key: string]: string}
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams {[key: string]: string}
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
data_access_config Mapping[str, str]
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enable_serverless_compute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

google_service_account str
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instance_profile_arn str
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
security_policy str
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sql_config_params Mapping[str, str]
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig Map<String>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute Boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount String
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn String
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy String
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams Map<String>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.

Outputs

All input properties are implicitly available as output properties. Additionally, the SqlGlobalConfig resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.

Look up Existing SqlGlobalConfig Resource

Get an existing SqlGlobalConfig resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: SqlGlobalConfigState, opts?: CustomResourceOptions): SqlGlobalConfig
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        data_access_config: Optional[Mapping[str, str]] = None,
        enable_serverless_compute: Optional[bool] = None,
        google_service_account: Optional[str] = None,
        instance_profile_arn: Optional[str] = None,
        security_policy: Optional[str] = None,
        sql_config_params: Optional[Mapping[str, str]] = None) -> SqlGlobalConfig
func GetSqlGlobalConfig(ctx *Context, name string, id IDInput, state *SqlGlobalConfigState, opts ...ResourceOption) (*SqlGlobalConfig, error)
public static SqlGlobalConfig Get(string name, Input<string> id, SqlGlobalConfigState? state, CustomResourceOptions? opts = null)
public static SqlGlobalConfig get(String name, Output<String> id, SqlGlobalConfigState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:SqlGlobalConfig
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
DataAccessConfig Dictionary<string, string>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
EnableServerlessCompute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

GoogleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
InstanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
SecurityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
SqlConfigParams Dictionary<string, string>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
DataAccessConfig map[string]string
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
EnableServerlessCompute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

GoogleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
InstanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
SecurityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
SqlConfigParams map[string]string
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig Map<String,String>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute Boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount String
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn String
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy String
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams Map<String,String>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig {[key: string]: string}
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount string
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn string
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy string
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams {[key: string]: string}
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
data_access_config Mapping[str, str]
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enable_serverless_compute bool

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

google_service_account str
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instance_profile_arn str
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
security_policy str
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sql_config_params Mapping[str, str]
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
dataAccessConfig Map<String>
Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
enableServerlessCompute Boolean

Deprecated: This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

googleServiceAccount String
Google service account used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
instanceProfileArn String
databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
securityPolicy String
The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
sqlConfigParams Map<String>
SQL configuration parameters let you override the default behavior for all sessions on all endpoints.
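
As a usage illustration, here is a minimal TypeScript sketch of the lookup; the resource always has the well-known ID global (see Import below), and the variable names are placeholders:

import * as databricks from "@pulumi/databricks";

// Look up the workspace-wide SQL configuration by its well-known ID.
const existing = databricks.SqlGlobalConfig.get("existing", "global");
export const currentSecurityPolicy = existing.securityPolicy;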

Import

You can import a databricks_sql_global_config resource with a command like the following (you must use global as the ID):

bash

$ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global
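
After a successful import, make sure your program declares a matching resource so that subsequent pulumi up runs manage it (pulumi import can generate this code for you). A minimal TypeScript sketch; the property values are placeholders and should mirror the imported state:

import * as databricks from "@pulumi/databricks";

// Corresponds to the imported resource named "this" with ID "global".
const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
});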

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes
This Pulumi package is based on the databricks Terraform Provider.