@@ -84,6 +84,7 @@ def __init__(self, metastore_id: str = None):
8484 )
8585 .enableHiveSupport ()
8686 )
87+ _managed_table_location = None
8788
8889 if not developer_enabled () and metastore_id :
8990 # Get the authentication credentials for the OCI data catalog service
@@ -94,12 +95,11 @@ def __init__(self, metastore_id: str = None):
9495
9596 data_catalog_client = OCIClientFactory (** auth ).data_catalog
9697 metastore = data_catalog_client .get_metastore (metastore_id ).data
98+ _managed_table_location = metastore .default_managed_table_location
9799 # Configure the Spark session builder object to use the specified metastore
98100 spark_builder .config (
99101 "spark.hadoop.oracle.dcat.metastore.id" , metastore_id
100- ).config (
101- "spark.sql.warehouse.dir" , metastore .default_managed_table_location
102- ).config (
102+ ).config ("spark.sql.warehouse.dir" , _managed_table_location ).config (
103103 "spark.driver.memory" , "16G"
104104 )
105105
@@ -114,7 +114,12 @@ def __init__(self, metastore_id: str = None):
114114
115115 self .spark_session .conf .set ("spark.sql.execution.arrow.pyspark.enabled" , "true" )
116116 self .spark_session .sparkContext .setLogLevel ("OFF" )
117+ self .managed_table_location = _managed_table_location
117118
def get_spark_session(self):
    """Accessor for the underlying ``SparkSession`` held by this object."""
    return self.spark_session
122+
def get_managed_table_location(self):
    """Accessor for the managed-table warehouse location.

    Returns the metastore's default managed table location, or ``None``
    when no metastore was configured at construction time.
    """
    return self.managed_table_location
0 commit comments