Commit f0c4d5e

feat: Migration script users support (#4262)
Added support for users in the migration script
1 parent 8ad8fda commit f0c4d5e

14 files changed: +1888 -11 lines

pkg/scripts/migration_script/README.md

Lines changed: 44 additions & 4 deletions

@@ -108,6 +108,26 @@ where script options are:
   Supported resources:
   - snowflake_database_role

+  - `users`, which expects a converted CSV output from the snowflake_users data source.
+    To support object parameters, combine the SHOW PARAMETERS output with the SHOW USERS output, so that the CSV header looks like `"comment","created_on",...,"abort_detached_query_value","abort_detached_query_level","timezone_value","timezone_level",...`.
+    When these additional columns are present, the resulting resource will contain the parameter values, provided the parameter level is set to "USER".
+
+    Caution: the password parameter is not supported, as the data source returns it as `"***"`.
+
+    Note: newlines are allowed only in the `comment`, `rsa_public_key`, and `rsa_public_key2` fields; elsewhere they might cause errors and require manual corrections.
+
+    For more details about using multiple sources, visit the [Multiple sources section](#multiple-sources).
+
+    Different user types are mapped to their respective Terraform resources based on the `type` attribute:
+    - `PERSON` (or empty) → `snowflake_user` - A human user who can interact with Snowflake
+    - `SERVICE` → `snowflake_service_user` - A service or application user without human interaction (cannot use password/SAML authentication; cannot have first_name, last_name, or must_change_password)
+    - `LEGACY_SERVICE` → `snowflake_legacy_service_user` - Similar to SERVICE but allows password and SAML authentication (cannot have first_name or last_name)
+
+    Supported resources:
+    - snowflake_user
+    - snowflake_service_user
+    - snowflake_legacy_service_user
+
 - **INPUT**:
   - Migration script operates on STDIN input in CSV format. You can redirect the input from a file or pipe it from another command.
 - **OUTPUT**:
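
The `type`-based mapping in the hunk above is easy to picture as a small dispatch on the CSV's `type` column. A minimal sketch in Go, using a hypothetical helper name `mapUserTypeToResource` (the commit's actual dispatch code is not part of this excerpt and may differ):

package main

import "fmt"

// mapUserTypeToResource is a hypothetical helper illustrating the README's
// mapping; the migration script's real implementation may differ.
func mapUserTypeToResource(userType string) (string, error) {
	switch userType {
	case "", "PERSON":
		return "snowflake_user", nil
	case "SERVICE":
		return "snowflake_service_user", nil
	case "LEGACY_SERVICE":
		return "snowflake_legacy_service_user", nil
	default:
		return "", fmt.Errorf("unsupported user type: %s", userType)
	}
}

func main() {
	for _, t := range []string{"", "PERSON", "SERVICE", "LEGACY_SERVICE"} {
		resource, _ := mapUserTypeToResource(t)
		fmt.Printf("%q -> %s\n", t, resource)
	}
}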
@@ -586,7 +606,7 @@ As an example, let's import all schemas in a given database. First, we need to d
 terraform {
   required_providers {
     snowflake = {
-      source = "Snowflake-Labs/snowflake"
+      source = "snowflakedb/snowflake"
     }
     local = {
       source = "hashicorp/local"
@@ -601,14 +621,16 @@ data "snowflake_schemas" "test" {
 }

 locals {
-  # Transform each schema by merging show_output and flattened parameters
+  # Transform each schema by merging show_output, describe_output, and flattened parameters
   schemas_flattened = [
     for schema in data.snowflake_schemas.test.schemas : merge(
       schema.show_output[0],
+      # Include describe output fields (if describe_output is present)
+      length(schema.describe_output) > 0 ? schema.describe_output[0] : {},
       # Flatten parameters: convert each parameter to {param_name}_value and {param_name}_level
       {
         for param_key, param_values in schema.parameters[0] :
-        param_key => param_values[0].value
+        "${param_key}_value" => param_values[0].value
       },
       {
         for param_key, param_values in schema.parameters[0] :
@@ -620,10 +642,28 @@ locals {
   # Get all unique keys from the first schema to create CSV header
   csv_header = join(",", [for key in keys(local.schemas_flattened[0]) : "\"${key}\""])

+  # Convert each schema object to CSV row (properly escape quotes and newlines for CSV format)
+  csv_escape = {
+    for schema in local.schemas_flattened :
+    schema.name => {
+      for key in keys(local.schemas_flattened[0]) :
+      key => replace(
+        replace(
+          replace(tostring(lookup(schema, key, "")), "\\", "\\\\"),
+          "\n", "\\n"
+        ),
+        "\"", "\"\""
+      )
+    }
+  }
+
   # Convert each schema object to CSV row
   csv_rows = [
     for schema in local.schemas_flattened :
-    join(",", [for key in keys(local.schemas_flattened[0]) : "\"${lookup(schema, key, "")}\""])
+    join(",", [
+      for key in keys(local.schemas_flattened[0]) :
+      "\"${local.csv_escape[schema.name][key]}\""
+    ])
   ]

   # Combine header and rows
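
The three nested `replace` calls in the `csv_escape` local build each CSV cell by escaping backslashes first, then newlines, then doubling quotes. For readers following the logic, here is a rough Go equivalent under a hypothetical name `csvEscape` (an illustrative port, not code from this commit):

package main

import (
	"fmt"
	"strings"
)

// csvEscape mirrors the HCL escaping above: backslashes first, then
// newlines, then CSV-doubled quotes. Escaping backslashes first keeps a
// literal "\n" already present in a field distinguishable from a real
// newline after the round trip.
func csvEscape(s string) string {
	s = strings.ReplaceAll(s, `\`, `\\`)
	s = strings.ReplaceAll(s, "\n", `\n`)
	s = strings.ReplaceAll(s, `"`, `""`)
	return s
}

func main() {
	comment := "line1\nline2 with \"quotes\" and a \\ backslash"
	fmt.Printf("\"%s\"\n", csvEscape(comment))
	// Output: "line1\nline2 with ""quotes"" and a \\ backslash"
}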

pkg/scripts/migration_script/common.go

Lines changed: 17 additions & 0 deletions

@@ -2,10 +2,27 @@ package main

 import (
 	"strconv"
+	"strings"

 	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
 )

+// csvUnescape reverses the CSV escaping done by the Terraform HCL generator.
+// It converts:
+// - \n -> newline
+// - \r -> carriage return
+// - \\ -> backslash
+func csvUnescape(s string) string {
+	const placeholder = "\x00BACKSLASH\x00"
+
+	result := s
+	result = strings.ReplaceAll(result, "\\\\", placeholder)
+	result = strings.ReplaceAll(result, "\\n", "\n")
+	result = strings.ReplaceAll(result, "\\r", "\r")
+	result = strings.ReplaceAll(result, placeholder, "\\")
+	return result
+}
+
 func handleOptionalFieldWithBuilder[T any, U any](parameter *T, builder func(T) *U) {
 	if parameter != nil {
 		builder(*parameter)
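
A design note on `csvUnescape`: the `\x00BACKSLASH\x00` placeholder protects escaped backslashes from the later `\n` and `\r` passes. Without it, a naive ordering corrupts a literal backslash followed by `n`, as this standalone sketch shows (illustrative code, not part of the commit):

package main

import (
	"fmt"
	"strings"
)

// naiveUnescape handles "\\n" before collapsing "\\\\"; the second half of
// an escaped backslash then gets misread as the start of a newline escape.
func naiveUnescape(s string) string {
	s = strings.ReplaceAll(s, "\\n", "\n")
	s = strings.ReplaceAll(s, "\\\\", "\\")
	return s
}

func main() {
	input := "\\\\n" // CSV cell text: an escaped backslash followed by 'n'
	fmt.Printf("naive:    %q\n", naiveUnescape(input)) // "\\\n" (backslash + real newline): wrong
	fmt.Printf("expected: %q\n", "\\n")                // what the tests below assert
}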
Lines changed: 68 additions & 0 deletions

@@ -0,0 +1,68 @@
+package main
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestCsvUnescape(t *testing.T) {
+	testCases := []struct {
+		name     string
+		input    string
+		expected string
+	}{
+		{
+			name:     "no escaping needed",
+			input:    "simple text",
+			expected: "simple text",
+		},
+		{
+			name:     "escaped newline",
+			input:    "line1\\nline2",
+			expected: "line1\nline2",
+		},
+		{
+			name:     "escaped carriage return",
+			input:    "line1\\rline2",
+			expected: "line1\rline2",
+		},
+		{
+			name:     "escaped backslash",
+			input:    "path\\\\to\\\\file",
+			expected: "path\\to\\file",
+		},
+		{
+			name:     "multiple newlines",
+			input:    "line1\\nline2\\nline3",
+			expected: "line1\nline2\nline3",
+		},
+		{
+			name:     "RSA key format",
+			input:    "-----BEGIN PUBLIC KEY-----\\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\\nMIIBCgKCAQEA...\\n-----END PUBLIC KEY-----",
+			expected: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEA...\n-----END PUBLIC KEY-----",
+		},
+		{
+			name:     "mixed escape sequences",
+			input:    "text\\nwith\\nnewlines\\rand\\\\backslash",
+			expected: "text\nwith\nnewlines\rand\\backslash",
+		},
+		{
+			name:     "empty string",
+			input:    "",
+			expected: "",
+		},
+		{
+			name:     "backslash followed by n (not newline)",
+			input:    "\\\\n",
+			expected: "\\n",
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			result := csvUnescape(tc.input)
+			assert.Equal(t, tc.expected, result)
+		})
+	}
+}
