1. Packages
  2. Databricks Provider
  3. API Docs
  4. User
Databricks v1.68.0 published on Friday, Apr 25, 2025 by Pulumi

databricks.User

Explore with Pulumi AI

This resource allows you to manage users in Databricks Workspace, Databricks Account Console or Azure Databricks Account Console. You can also associate Databricks users to databricks_group. Upon user creation the user will receive a welcome email. You can also get information about caller identity using databricks.getCurrentUser data source.

To assign account-level users to a workspace, use databricks_mws_permission_assignment.

Entitlements, like allow_cluster_create, allow_instance_pool_create, databricks_sql_access, and workspace_access, are applicable only to workspace-level users. Use the databricks.Entitlements resource to assign entitlements inside a workspace to account-level users.

To create users in the Databricks account, the provider must be configured with host = "https://accounts.cloud.databricks.com" on AWS deployments or host = "https://accounts.azuredatabricks.net" and authenticate using AAD tokens on Azure deployments.

The default behavior when deleting a databricks.User resource depends on whether the provider is configured at the workspace level or the account level. When the provider is configured at the workspace level, the user will be deleted from the workspace. When the provider is configured at the account level, the user will be deactivated but not deleted. To instead delete the user from the account when the resource is destroyed, set disable_as_user_deletion = false; to make the default deactivation behavior explicit, set disable_as_user_deletion = true.

Example Usage

Creating regular user:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const me = new databricks.User("me", {userName: "me@example.com"});
Copy
import pulumi
import pulumi_databricks as databricks

me = databricks.User("me", user_name="me@example.com")
Copy
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());

    }
}
Copy
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
Copy

Creating user with administrative permissions - referencing special admins databricks.Group in databricks.GroupMember resource:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const admins = databricks.getGroup({
    displayName: "admins",
});
const me = new databricks.User("me", {userName: "me@example.com"});
const i_am_admin = new databricks.GroupMember("i-am-admin", {
    groupId: admins.then(admins => admins.id),
    memberId: me.id,
});
Copy
import pulumi
import pulumi_databricks as databricks

admins = databricks.get_group(display_name="admins")
me = databricks.User("me", user_name="me@example.com")
i_am_admin = databricks.GroupMember("i-am-admin",
    group_id=admins.id,
    member_id=me.id)
Copy
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		admins, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
			DisplayName: "admins",
		}, nil)
		if err != nil {
			return err
		}
		me, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGroupMember(ctx, "i-am-admin", &databricks.GroupMemberArgs{
			GroupId:  pulumi.String(admins.Id),
			MemberId: me.ID(),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var admins = Databricks.GetGroup.Invoke(new()
    {
        DisplayName = "admins",
    });

    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });

    var i_am_admin = new Databricks.GroupMember("i-am-admin", new()
    {
        GroupId = admins.Apply(getGroupResult => getGroupResult.Id),
        MemberId = me.Id,
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetGroupArgs;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import com.pulumi.databricks.GroupMember;
import com.pulumi.databricks.GroupMemberArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var admins = DatabricksFunctions.getGroup(GetGroupArgs.builder()
            .displayName("admins")
            .build());

        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());

        var i_am_admin = new GroupMember("i-am-admin", GroupMemberArgs.builder()
            .groupId(admins.id())
            .memberId(me.id())
            .build());

    }
}
Copy
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
  i-am-admin:
    type: databricks:GroupMember
    properties:
      groupId: ${admins.id}
      memberId: ${me.id}
variables:
  admins:
    fn::invoke:
      function: databricks:getGroup
      arguments:
        displayName: admins
Copy

Creating user with cluster create permissions:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const me = new databricks.User("me", {
    userName: "me@example.com",
    displayName: "Example user",
    allowClusterCreate: true,
});
Copy
import pulumi
import pulumi_databricks as databricks

me = databricks.User("me",
    user_name="me@example.com",
    display_name="Example user",
    allow_cluster_create=True)
Copy
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName:           pulumi.String("me@example.com"),
			DisplayName:        pulumi.String("Example user"),
			AllowClusterCreate: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
        AllowClusterCreate = true,
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .allowClusterCreate(true)
            .build());

    }
}
Copy
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
      displayName: Example user
      allowClusterCreate: true
Copy

Creating user in AWS Databricks account:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const accountUser = new databricks.User("account_user", {
    userName: "me@example.com",
    displayName: "Example user",
});
Copy
import pulumi
import pulumi_databricks as databricks

account_user = databricks.User("account_user",
    user_name="me@example.com",
    display_name="Example user")
Copy
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "account_user", &databricks.UserArgs{
			UserName:    pulumi.String("me@example.com"),
			DisplayName: pulumi.String("Example user"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var accountUser = new Databricks.User("account_user", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build());

    }
}
Copy
resources:
  accountUser:
    type: databricks:User
    name: account_user
    properties:
      userName: me@example.com
      displayName: Example user
Copy

Creating user in Azure Databricks account:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const accountUser = new databricks.User("account_user", {
    userName: "me@example.com",
    displayName: "Example user",
});
Copy
import pulumi
import pulumi_databricks as databricks

account_user = databricks.User("account_user",
    user_name="me@example.com",
    display_name="Example user")
Copy
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewUser(ctx, "account_user", &databricks.UserArgs{
			UserName:    pulumi.String("me@example.com"),
			DisplayName: pulumi.String("Example user"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var accountUser = new Databricks.User("account_user", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build());

    }
}
Copy
resources:
  accountUser:
    type: databricks:User
    name: account_user
    properties:
      userName: me@example.com
      displayName: Example user
Copy

The following resources are often used in the same context:

  • End to end workspace management guide.
  • databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
  • databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
  • databricks.GroupInstanceProfile to attach databricks.InstanceProfile (AWS) to databricks_group.
  • databricks.GroupMember to attach users and groups as group members.
  • databricks.InstanceProfile to manage AWS EC2 instance profiles that users can launch databricks.Cluster and access data, like databricks_mount.
  • databricks.User data to retrieve information about databricks_user.

Create User Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new User(name: string, args: UserArgs, opts?: CustomResourceOptions);
@overload
def User(resource_name: str,
         args: UserArgs,
         opts: Optional[ResourceOptions] = None)

@overload
def User(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         user_name: Optional[str] = None,
         display_name: Optional[str] = None,
         force: Optional[bool] = None,
         allow_instance_pool_create: Optional[bool] = None,
         databricks_sql_access: Optional[bool] = None,
         disable_as_user_deletion: Optional[bool] = None,
         acl_principal_id: Optional[str] = None,
         external_id: Optional[str] = None,
         allow_cluster_create: Optional[bool] = None,
         force_delete_home_dir: Optional[bool] = None,
         force_delete_repos: Optional[bool] = None,
         home: Optional[str] = None,
         repos: Optional[str] = None,
         active: Optional[bool] = None,
         workspace_access: Optional[bool] = None)
func NewUser(ctx *Context, name string, args UserArgs, opts ...ResourceOption) (*User, error)
public User(string name, UserArgs args, CustomResourceOptions? opts = null)
public User(String name, UserArgs args)
public User(String name, UserArgs args, CustomResourceOptions options)
type: databricks:User
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. UserArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. UserArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. UserArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. UserArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. UserArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var userResource = new Databricks.User("userResource", new()
{
    UserName = "string",
    DisplayName = "string",
    Force = false,
    AllowInstancePoolCreate = false,
    DatabricksSqlAccess = false,
    DisableAsUserDeletion = false,
    AclPrincipalId = "string",
    ExternalId = "string",
    AllowClusterCreate = false,
    ForceDeleteHomeDir = false,
    ForceDeleteRepos = false,
    Home = "string",
    Repos = "string",
    Active = false,
    WorkspaceAccess = false,
});
Copy
example, err := databricks.NewUser(ctx, "userResource", &databricks.UserArgs{
	UserName:                pulumi.String("string"),
	DisplayName:             pulumi.String("string"),
	Force:                   pulumi.Bool(false),
	AllowInstancePoolCreate: pulumi.Bool(false),
	DatabricksSqlAccess:     pulumi.Bool(false),
	DisableAsUserDeletion:   pulumi.Bool(false),
	AclPrincipalId:          pulumi.String("string"),
	ExternalId:              pulumi.String("string"),
	AllowClusterCreate:      pulumi.Bool(false),
	ForceDeleteHomeDir:      pulumi.Bool(false),
	ForceDeleteRepos:        pulumi.Bool(false),
	Home:                    pulumi.String("string"),
	Repos:                   pulumi.String("string"),
	Active:                  pulumi.Bool(false),
	WorkspaceAccess:         pulumi.Bool(false),
})
Copy
var userResource = new User("userResource", UserArgs.builder()
    .userName("string")
    .displayName("string")
    .force(false)
    .allowInstancePoolCreate(false)
    .databricksSqlAccess(false)
    .disableAsUserDeletion(false)
    .aclPrincipalId("string")
    .externalId("string")
    .allowClusterCreate(false)
    .forceDeleteHomeDir(false)
    .forceDeleteRepos(false)
    .home("string")
    .repos("string")
    .active(false)
    .workspaceAccess(false)
    .build());
Copy
user_resource = databricks.User("userResource",
    user_name="string",
    display_name="string",
    force=False,
    allow_instance_pool_create=False,
    databricks_sql_access=False,
    disable_as_user_deletion=False,
    acl_principal_id="string",
    external_id="string",
    allow_cluster_create=False,
    force_delete_home_dir=False,
    force_delete_repos=False,
    home="string",
    repos="string",
    active=False,
    workspace_access=False)
Copy
const userResource = new databricks.User("userResource", {
    userName: "string",
    displayName: "string",
    force: false,
    allowInstancePoolCreate: false,
    databricksSqlAccess: false,
    disableAsUserDeletion: false,
    aclPrincipalId: "string",
    externalId: "string",
    allowClusterCreate: false,
    forceDeleteHomeDir: false,
    forceDeleteRepos: false,
    home: "string",
    repos: "string",
    active: false,
    workspaceAccess: false,
});
Copy
type: databricks:User
properties:
    aclPrincipalId: string
    active: false
    allowClusterCreate: false
    allowInstancePoolCreate: false
    databricksSqlAccess: false
    disableAsUserDeletion: false
    displayName: string
    externalId: string
    force: false
    forceDeleteHomeDir: false
    forceDeleteRepos: false
    home: string
    repos: string
    userName: string
    workspaceAccess: false
Copy

User Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The User resource accepts the following input properties:

UserName
This property is required.
Changes to this property will trigger replacement.
string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
AclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
Active bool
Whether the user is active. True by default, but can be set to false to deactivate the user while preserving their assets.
AllowClusterCreate bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
AllowInstancePoolCreate bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
DatabricksSqlAccess bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
DisableAsUserDeletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
DisplayName string
This is an alias for the username that can be the full name of the user.
ExternalId string
ID of the user in an external identity provider.
Force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
ForceDeleteHomeDir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
ForceDeleteRepos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
Home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
Repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
WorkspaceAccess bool
UserName
This property is required.
Changes to this property will trigger replacement.
string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
AclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
Active bool
Whether the user is active. True by default, but can be set to false to deactivate the user while preserving their assets.
AllowClusterCreate bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
AllowInstancePoolCreate bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
DatabricksSqlAccess bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
DisableAsUserDeletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
DisplayName string
This is an alias for the username that can be the full name of the user.
ExternalId string
ID of the user in an external identity provider.
Force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
ForceDeleteHomeDir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
ForceDeleteRepos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
Home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
Repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
WorkspaceAccess bool
userName
This property is required.
Changes to this property will trigger replacement.
String
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
aclPrincipalId String
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active Boolean
Whether the user is active. True by default, but can be set to false to deactivate the user while preserving their assets.
allowClusterCreate Boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate Boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess Boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion Boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName String
This is an alias for the username that can be the full name of the user.
externalId String
ID of the user in an external identity provider.
force Boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir Boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos Boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home String
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos String
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
workspaceAccess Boolean
userName
This property is required.
Changes to this property will trigger replacement.
string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
aclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active boolean
Whether the user is active. True by default, but can be set to false to deactivate the user while preserving their assets.
allowClusterCreate boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName string
This is an alias for the username that can be the full name of the user.
externalId string
ID of the user in an external identity provider.
force boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
workspaceAccess boolean
user_name
This property is required.
Changes to this property will trigger replacement.
str
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
acl_principal_id str
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active bool
Whether the user is active. True by default, but can be set to false to deactivate the user while preserving their assets.
allow_cluster_create bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allow_instance_pool_create bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricks_sql_access bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disable_as_user_deletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
display_name str
This is an alias for the username that can be the full name of the user.
external_id str
ID of the user in an external identity provider.
force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
force_delete_home_dir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
force_delete_repos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home str
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos str
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
workspace_access bool
userName
This property is required.
Changes to this property will trigger replacement.
String
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
aclPrincipalId String
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active Boolean
Whether the user is active. True by default, but can be set to false in case of user deactivation while preserving user assets.
allowClusterCreate Boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate Boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess Boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion Boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName String
This is an alias for the username that can be the full name of the user.
externalId String
ID of the user in an external identity provider.
force Boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir Boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos Boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home String
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos String
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
workspaceAccess Boolean

Outputs

All input properties are implicitly available as output properties. Additionally, the User resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
Id string
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.
id string
The provider-assigned unique ID for this managed resource.
id str
The provider-assigned unique ID for this managed resource.
id String
The provider-assigned unique ID for this managed resource.

Look up Existing User Resource

Get an existing User resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: UserState, opts?: CustomResourceOptions): User
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        acl_principal_id: Optional[str] = None,
        active: Optional[bool] = None,
        allow_cluster_create: Optional[bool] = None,
        allow_instance_pool_create: Optional[bool] = None,
        databricks_sql_access: Optional[bool] = None,
        disable_as_user_deletion: Optional[bool] = None,
        display_name: Optional[str] = None,
        external_id: Optional[str] = None,
        force: Optional[bool] = None,
        force_delete_home_dir: Optional[bool] = None,
        force_delete_repos: Optional[bool] = None,
        home: Optional[str] = None,
        repos: Optional[str] = None,
        user_name: Optional[str] = None,
        workspace_access: Optional[bool] = None) -> User
func GetUser(ctx *Context, name string, id IDInput, state *UserState, opts ...ResourceOption) (*User, error)
public static User Get(string name, Input<string> id, UserState? state, CustomResourceOptions? opts = null)
public static User get(String name, Output<String> id, UserState state, CustomResourceOptions options)
resources:  _:    type: databricks:User    get:      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
Active bool
Whether the user is active. True by default, but can be set to false in case of user deactivation while preserving user assets.
AllowClusterCreate bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
AllowInstancePoolCreate bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
DatabricksSqlAccess bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
DisableAsUserDeletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
DisplayName string
This is an alias for the username that can be the full name of the user.
ExternalId string
ID of the user in an external identity provider.
Force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
ForceDeleteHomeDir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
ForceDeleteRepos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
Home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
Repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
UserName Changes to this property will trigger replacement. string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
WorkspaceAccess bool
AclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
Active bool
Whether the user is active. True by default, but can be set to false in case of user deactivation while preserving user assets.
AllowClusterCreate bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
AllowInstancePoolCreate bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
DatabricksSqlAccess bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
DisableAsUserDeletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
DisplayName string
This is an alias for the username that can be the full name of the user.
ExternalId string
ID of the user in an external identity provider.
Force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
ForceDeleteHomeDir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
ForceDeleteRepos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
Home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
Repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
UserName Changes to this property will trigger replacement. string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
WorkspaceAccess bool
aclPrincipalId String
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
allowClusterCreate Boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate Boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess Boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion Boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName String
This is an alias for the username that can be the full name of the user.
externalId String
ID of the user in an external identity provider.
force Boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir Boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos Boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home String
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos String
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
userName Changes to this property will trigger replacement. String
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
workspaceAccess Boolean
aclPrincipalId string
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
allowClusterCreate boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName string
This is an alias for the username that can be the full name of the user.
externalId string
ID of the user in an external identity provider.
force boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home string
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos string
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
userName Changes to this property will trigger replacement. string
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
workspaceAccess boolean
acl_principal_id str
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
allow_cluster_create bool
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allow_instance_pool_create bool
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricks_sql_access bool
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disable_as_user_deletion bool
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
display_name str
This is an alias for the username that can be the full name of the user.
external_id str
ID of the user in an external identity provider.
force bool
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
force_delete_home_dir bool
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
force_delete_repos bool
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home str
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos str
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
user_name Changes to this property will trigger replacement. str
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
workspace_access bool
aclPrincipalId String
identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
allowClusterCreate Boolean
Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
allowInstancePoolCreate Boolean
Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
databricksSqlAccess Boolean
This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
disableAsUserDeletion Boolean
Deactivate the user when deleting the resource, rather than deleting the user entirely. Defaults to true when the provider is configured at the account-level and false when configured at the workspace-level. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags.
displayName String
This is an alias for the username that can be the full name of the user.
externalId String
ID of the user in an external identity provider.
force Boolean
Ignore cannot create user: User with username X already exists errors and implicitly import the specific user into Pulumi state, enforcing entitlements defined in the instance of resource. This functionality is experimental and is designed to simplify corner cases, like Azure Active Directory synchronisation.
forceDeleteHomeDir Boolean
This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
forceDeleteRepos Boolean
This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
home String
Home folder of the user, e.g. /Users/mr.foo@example.com.
repos String
Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
userName Changes to this property will trigger replacement. String
This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
workspaceAccess Boolean

Import

The SCIM user resource can be imported using its id:

bash

$ pulumi import databricks:index/user:User me <user-id>
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes
This Pulumi package is based on the databricks Terraform Provider.