This module creates a BigLake Metastore catalog with databases and corresponding tables in each database.
# Example: a BigLake Metastore catalog holding one Hive database
# ("my_database") with one managed Hive table ("my_table").
module "biglake_catalog" {
  source     = "./fabric/modules/biglake-catalog"
  project_id = var.project_id
  name       = "my_catalog"
  location   = "US"
  databases = {
    my_database = {
      type = "HIVE"
      hive_options = {
        location_uri = "gs://my-bucket/my-database-folder"
        parameters = {
          "owner" : "John Doe"
        }
      }
      tables = {
        my_table = {
          type = "HIVE"
          hive_options = {
            table_type    = "MANAGED_TABLE"
            location_uri  = "gs://my-bucket/my-table-folder"
            input_format  = "org.apache.hadoop.mapred.SequenceFileInputFormat"
            output_format = "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
            parameters = {
              "spark.sql.create.version"          = "3.1.3"
              "spark.sql.sources.schema.numParts" = "1"
              "transient_lastDdlTime"             = "1680894197"
              "spark.sql.partitionProvider"       = "catalog"
              "owner"                             = "John Doe"
              # Spark table schema, serialized as the JSON string Spark expects.
              "spark.sql.sources.schema.part.0" = jsonencode({
                type = "struct"
                fields = [
                  {
                    name     = "id"
                    type     = "integer"
                    nullable = true
                    metadata = {}
                  },
                  {
                    name     = "name"
                    type     = "string"
                    nullable = true
                    metadata = {}
                  },
                  {
                    name     = "age"
                    type     = "integer"
                    nullable = true
                    metadata = {}
                  }
                ]
              })
              "spark.sql.sources.provider" = "iceberg"
              "provider"                   = "iceberg"
            }
          }
        }
      }
    }
  }
}
# tftest modules=1 resources=3 inventory=basic.yaml
| name | description | type | required | default |
|---|---|---|---|---|
| databases | Databases. | map(object({…})) | ✓ | |
| location | Location. | string | ✓ | |
| name | Name. | string | ✓ | |
| project_id | Project ID. | string | ✓ | |