diff --git a/tests/yoda_dbt2looker/core/test_core_generator.py b/tests/yoda_dbt2looker/core/test_core_generator.py
index b19561f..9ecf195 100644
--- a/tests/yoda_dbt2looker/core/test_core_generator.py
+++ b/tests/yoda_dbt2looker/core/test_core_generator.py
@@ -80,7 +80,16 @@ def test_lookml_dimension_groups_from_model(self, mock_map_adapter_type_to_looke
         mock_column_2.data_type = "date"
         mock_dbt_model.columns = {"col1": mock_column_1, "col2": mock_column_2}
 
-        mock_map_adapter_type_to_looker.side_effect = ["timestamp", "timestamp", "datetime", "datetime"]
+        mock_map_adapter_type_to_looker.side_effect = [
+            "timestamp",
+            "timestamp",
+            "datetime",
+            "datetime",
+            "timestamp",
+            "timestamp",
+            "datetime",
+            "datetime",
+        ]
         mock_lookml_date_time_dimension_group.return_value = "datetime_group"
 
         result = generator.lookml_dimension_groups_from_model(mock_dbt_model, SupportedDbtAdapters.spark.value)
@@ -89,6 +98,18 @@ def test_lookml_dimension_groups_from_model(self, mock_map_adapter_type_to_looke
         mock_lookml_date_time_dimension_group.assert_has_calls(
             [call(mock_column_1, SupportedDbtAdapters.spark), call(mock_column_2, SupportedDbtAdapters.spark)])
 
+        result = generator.lookml_dimension_groups_from_model(
+            mock_dbt_model, SupportedDbtAdapters.databricks.value
+        )
+
+        assert result == ["datetime_group", "datetime_group"]
+        mock_lookml_date_time_dimension_group.assert_has_calls(
+            [
+                call(mock_column_1, SupportedDbtAdapters.databricks),
+                call(mock_column_2, SupportedDbtAdapters.databricks),
+            ]
+        )
+
     @patch('yoda_dbt2looker.core.generator.map_adapter_type_to_looker')
     def test_lookml_dimensions_from_model(self, mock_map_adapter_type_to_looker):
         mock_dbt_model = MagicMock(spec=DbtModel)
@@ -118,3 +139,20 @@ def test_lookml_dimensions_from_model(self, mock_map_adapter_type_to_looke
         assert result == [expected_dimension]
         mock_map_adapter_type_to_looker.assert_has_calls([call(SupportedDbtAdapters.spark, "integer")
                                                              , call(SupportedDbtAdapters.spark, "integer")])
+        result = generator.lookml_dimensions_from_model(
+            mock_dbt_model, SupportedDbtAdapters.databricks
+        )
+
+        expected_dimension = {
+            "name": "test_column",
+            "type": "number",
+            "sql": "${TABLE}.test_column",
+            "description": "test column",
+        }
+        assert result == [expected_dimension]
+        mock_map_adapter_type_to_looker.assert_has_calls(
+            [
+                call(SupportedDbtAdapters.databricks, "integer"),
+                call(SupportedDbtAdapters.databricks, "integer"),
+            ]
+        )
diff --git a/yoda_dbt2looker/generator.py b/yoda_dbt2looker/generator.py
index 52fcd84..ab6843d 100644
--- a/yoda_dbt2looker/generator.py
+++ b/yoda_dbt2looker/generator.py
@@ -175,6 +175,24 @@
         "timestamp": "timestamp",
         "date": "datetime",
     },
+    "databricks": {
+        "byte": "number",
+        "short": "number",
+        "integer": "number",
+        "int": "number",
+        "bigint": "number",
+        "smallint": "number",
+        "long": "number",
+        "float": "number",
+        "double": "number",
+        "decimal": "number",
+        "string": "string",
+        "varchar": "string",
+        "char": "string",
+        "boolean": "yesno",
+        "timestamp": "timestamp",
+        "date": "datetime",
+    },
 }
 
 looker_date_time_types = ["datetime", "timestamp"]
@@ -203,7 +221,9 @@ def map_adapter_type_to_looker(
         return None
     normalised_column_type = (
         normalise_spark_types(column_type)
-        if adapter_type == models.SupportedDbtAdapters.spark.value
+        if adapter_type in
+        [models.SupportedDbtAdapters.spark.value,
+         models.SupportedDbtAdapters.databricks.value]
         else column_type
     )
     looker_type = LOOKER_DTYPE_MAP[adapter_type].get(normalised_column_type)
diff --git a/yoda_dbt2looker/models.py b/yoda_dbt2looker/models.py
index 845b1fe..1139050 100644
--- a/yoda_dbt2looker/models.py
+++ b/yoda_dbt2looker/models.py
@@ -20,6 +20,7 @@ class SupportedDbtAdapters(str, Enum):
     redshift = "redshift"
     snowflake = "snowflake"
     spark = "spark"
+    databricks = "databricks"
 
 
 # Lookml types