-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: databricks_template_schema.json
More file actions
225 lines (207 loc) · 10.4 KB
/
databricks_template_schema.json
File metadata and controls
225 lines (207 loc) · 10.4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
{
"welcome_message": "\nWelcome to the Databricks Multi-Environment Bundle Template!\n\nThis template creates a production-ready Databricks Asset Bundle with:\n- Multi-environment deployment (user/dev/stage/prod)\n- Configurable compute (serverless or classic)\n- Optional RBAC permissions\n- Sample ETL jobs and pipelines\n- Unity Catalog schema management\n- CI/CD pipeline templates (Azure DevOps, GitHub Actions, GitLab)\n\nLet's configure your project...\n",
"properties": {
"project_name": {
"type": "string",
"description": "\nProject name (used for bundle name, folder, and resource prefixes)\nMust start with a letter; only letters, numbers, and underscores allowed.\nproject_name",
"order": 1,
"pattern": "^[A-Za-z][A-Za-z0-9_]*$",
"pattern_match_failure_message": "Project name must start with a letter and contain only letters, numbers, and underscores."
},
"environment_setup": {
"type": "string",
"default": "full",
"description": "\n========================================\n\nHow many deployment environments do you need?\nType 'full' or 'minimal':\n- full: user (local dev), stage (pre-prod), prod\n- minimal: user (local dev), stage (shared testing)\nenvironment_setup",
"order": 2,
"pattern": "^(full|minimal)$",
"pattern_match_failure_message": "Please enter 'full' or 'minimal'."
},
"include_dev_environment": {
"type": "string",
"default": "no",
"description": "\n========================================\n\nInclude a shared development environment (dev)?\nType 'yes' or 'no':\n- yes: Add 'dev' target between user and stage (for nightly builds, shared testing)\n- no: Skip dev, developers test in their own 'user' environment before stage\n\nMost teams don't need this - developers use 'user' for personal testing,\nthen deploy to 'stage' for integration testing.\ninclude_dev_environment",
"order": 3,
"pattern": "^(yes|no)$",
"pattern_match_failure_message": "Please enter 'yes' or 'no'.",
"skip_prompt_if": {
"properties": {
"environment_setup": {
"const": "minimal"
}
}
}
},
"compute_type": {
"type": "string",
"default": "classic",
"description": "\n========================================\n\nWhat compute type should jobs and pipelines use?\nType 'classic', 'serverless', or 'both':\n- classic: Resource-specific clusters (spin up when job runs, more control)\n- serverless: Databricks-managed compute (no cluster config, faster start)\n- both: Include both options (classic active, serverless commented)\n\nNote: Classic is recommended as default since not all workspaces support serverless.\ncompute_type",
"order": 4,
"pattern": "^(classic|serverless|both)$",
"pattern_match_failure_message": "Please enter 'classic', 'serverless', or 'both'."
},
"cloud_provider": {
"type": "string",
"default": "azure",
"description": "\n========================================\n\nWhich cloud provider is your Databricks workspace on?\nType 'azure', 'aws', or 'gcp':\nThis determines cluster node types and CI/CD authentication method.\ncloud_provider",
"order": 5,
"pattern": "^(azure|aws|gcp)$",
"pattern_match_failure_message": "Please enter 'azure', 'aws', or 'gcp'."
},
"workspace_setup": {
"type": "string",
"default": "single_workspace",
"description": "\n========================================\n\nWorkspace topology for your environments?\nType 'single_workspace' or 'multi_workspace':\n- single_workspace: All environments share one Databricks workspace (Unity Catalog provides isolation)\n- multi_workspace: Separate workspaces per environment (Databricks recommended for production)\n\nSee: https://docs.databricks.com/aws/en/dev-tools/ci-cd/best-practices\nworkspace_setup",
"order": 6,
"pattern": "^(single_workspace|multi_workspace)$",
"pattern_match_failure_message": "Please enter 'single_workspace' or 'multi_workspace'."
},
"uc_catalog_suffix": {
"type": "string",
"default": "my_domain",
"description": "\n========================================\n\nUnity Catalog suffix for catalog names.\nCatalogs will be named: dev_<suffix>, stage_<suffix>, prod_<suffix>\nThese catalogs must already exist (created by your platform/infra team).\n\nThe 'user' target shares the dev catalog with per-user schema prefixes\nfor isolation (e.g., jsmith_bronze, jsmith_silver, jsmith_gold).\n\nExamples: 'sales', 'marketing', 'analytics', 'my_project'\nMust be lowercase, start with a letter, only letters/numbers/underscores.\nuc_catalog_suffix",
"order": 7,
"pattern": "^[a-z][a-z0-9_]*$",
"pattern_match_failure_message": "Catalog suffix must be lowercase, start with a letter, and contain only letters, numbers, and underscores."
},
"include_permissions": {
"type": "string",
"default": "yes",
"description": "\n========================================\n\nInclude comprehensive permissions/RBAC configuration?\nType 'yes' or 'no':\n- yes: Full permission setup with environment-aware groups (developers, qa_team, analytics_team; operations_team in full mode)\n- no: No permissions blocks (you'll configure access manually)\ninclude_permissions",
"order": 8,
"pattern": "^(yes|no)$",
"pattern_match_failure_message": "Please enter 'yes' or 'no'."
},
"configure_sp_now": {
"type": "string",
"default": "no",
"description": "\n========================================\n\nConfigure service principal IDs now?\nType 'yes' or 'no':\n- yes: Enter SP application IDs for each environment\n- no: Leave placeholders (search for 'SP_PLACEHOLDER' later to replace)\n\nService principals are required for CI/CD deployments to dev/stage/prod.\nYou can use the same SP ID for multiple environments if needed.\nconfigure_sp_now",
"order": 9,
"pattern": "^(yes|no)$",
"pattern_match_failure_message": "Please enter 'yes' or 'no'."
},
"dev_service_principal": {
"type": "string",
"default": "",
"description": "\n========================================\n\nService Principal Application ID for the DEV environment.\nLeave empty to use placeholder.\ndev_service_principal",
"order": 10,
"skip_prompt_if": {
"anyOf": [
{
"properties": {
"configure_sp_now": {
"const": "no"
}
}
},
{
"properties": {
"include_dev_environment": {
"const": "no"
}
}
}
]
}
},
"stage_service_principal": {
"type": "string",
"default": "",
"description": "\n========================================\n\nService Principal Application ID for the STAGE environment.\nLeave empty to use placeholder.\nstage_service_principal",
"order": 11,
"skip_prompt_if": {
"properties": {
"configure_sp_now": {
"const": "no"
}
}
}
},
"prod_service_principal": {
"type": "string",
"default": "",
"description": "\n========================================\n\nService Principal Application ID for the PROD environment.\nLeave empty to use placeholder.\nprod_service_principal",
"order": 12,
"skip_prompt_if": {
"anyOf": [
{
"properties": {
"configure_sp_now": {
"const": "no"
}
}
},
{
"properties": {
"environment_setup": {
"const": "minimal"
}
}
}
]
}
},
"include_cicd": {
"type": "string",
"default": "yes",
"description": "\n========================================\n\nInclude CI/CD pipeline templates?\nType 'yes' or 'no':\n- yes: Generate CI/CD workflows for automated bundle validation and deployment\n- no: Skip CI/CD (you'll set up pipelines manually)\ninclude_cicd",
"order": 13,
"pattern": "^(yes|no)$",
"pattern_match_failure_message": "Please enter 'yes' or 'no'."
},
"cicd_platform": {
"type": "string",
"default": "azure_devops",
"description": "\n========================================\n\nWhich CI/CD platform do you use?\nType 'azure_devops', 'github_actions', or 'gitlab':\n- azure_devops: Azure DevOps Pipelines (YAML)\n- github_actions: GitHub Actions workflows\n- gitlab: GitLab CI/CD pipelines\ncicd_platform",
"order": 14,
"pattern": "^(azure_devops|github_actions|gitlab)$",
"pattern_match_failure_message": "Please enter 'azure_devops', 'github_actions', or 'gitlab'.",
"skip_prompt_if": {
"properties": {
"include_cicd": {
"const": "no"
}
}
}
},
"default_branch": {
"type": "string",
"default": "main",
"description": "\n========================================\n\nName of your default branch.\nStaging deployments trigger when PRs are merged into this branch.\ndefault_branch",
"order": 15,
"skip_prompt_if": {
"properties": {
"include_cicd": {
"const": "no"
}
}
}
},
"release_branch": {
"type": "string",
"default": "release",
"description": "\n========================================\n\nName of your release/production branch.\nProduction deployments trigger when PRs are merged into this branch.\nrelease_branch",
"order": 16,
"skip_prompt_if": {
"anyOf": [
{
"properties": {
"include_cicd": {
"const": "no"
}
}
},
{
"properties": {
"environment_setup": {
"const": "minimal"
}
}
}
]
}
}
},
"success_message": "\n========================================\n\n\nYour project '{{.project_name}}' has been created successfully!\n\nNext steps:\n1. cd {{.project_name}}\n2. Review and update the configuration files\n3. Run: databricks bundle validate -t user\n4. Run: databricks bundle deploy -t user\n\nFor detailed setup instructions, see:\n- README.md - Project overview and quickstart\n- docs/PERMISSIONS_SETUP.md - RBAC configuration\n- docs/CI_CD_SETUP.md - CI/CD pipeline configuration\n",
"min_databricks_cli_version": "v0.274.0",
"version": 1
}