From b3d17d6c9c9c88333670b4a27304c1e97c84f120 Mon Sep 17 00:00:00 2001
From: jack86596
Date: Tue, 12 Oct 2021 14:50:43 +0800
Subject: [PATCH] [CARBONDATA-4304] Initialize CarbonEnv will try to create folder on local filesystem if storepath is set without scheme

---
 .../org/apache/spark/sql/CarbonEnv.scala      |  7 +++-
 .../createTable/TestCreateTablePath.scala     | 41 +++++++++++++++++++
 2 files changed, 47 insertions(+), 1 deletion(-)

diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index b73772f0975..3b6f8be1ddd 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -74,8 +74,13 @@ class CarbonEnv {
     var storePath = properties.getProperty(CarbonCommonConstants.STORE_LOCATION)
     if (storePath == null) {
       storePath = FileFactory.getUpdatedFilePath(sparkSession.conf.get("spark.sql.warehouse.dir"))
-      properties.addProperty(CarbonCommonConstants.STORE_LOCATION, storePath)
     }
+    val tmpPath = new Path(storePath)
+    val fs = tmpPath.getFileSystem(sparkSession.sparkContext.hadoopConfiguration)
+    val qualifiedPath = fs.makeQualified(tmpPath)
+    storePath = qualifiedPath.toString
+    properties.addProperty(CarbonCommonConstants.STORE_LOCATION,
+      Path.getPathWithoutSchemeAndAuthority(qualifiedPath).toString)
     LOGGER.info(s"Initializing CarbonEnv, store location: $storePath")
     // Creating the index server temp folder where splits for select query is written
     CarbonUtil.createTempFolderForIndexServer(null);
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTablePath.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTablePath.scala
index dd3ddc033ef..8e59b63fa9c 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTablePath.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTablePath.scala
@@ -21,6 +21,9 @@ import org.apache.spark.sql.CarbonEnv
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
 /**
  * Test functionality of create table with location
  */
@@ -40,4 +43,42 @@ class TestCreateTablePath extends QueryTest with BeforeAndAfterAll {
     sqlContext.sparkContext.hadoopConfiguration.set("fs.defaultFS", "file:///")
   }
 
+  test("test table in default database is under storepath if storepath is not set or" +
+    " equals to spark.sql.warehouse.dir") {
+    sql("create table if not exists t1 (i int) stored as carbondata")
+    val table = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession)
+    val tablePath = table.getTablePath
+    val storePath = CarbonProperties.getStorePath
+    assert(tablePath.equals(storePath + CarbonCommonConstants.FILE_SEPARATOR + table.getTableName))
+    sql("DROP TABLE IF EXISTS t1")
+  }
+
+  test("test table in other database is under storepath/dbname.db " +
+    "if storepath is not set or equals to spark.sql.warehouse.dir") {
+    sql("create database if not exists db1")
+    sql("create table if not exists db1.t1 (i int) stored as carbondata")
+    val table = CarbonEnv.getCarbonTable(Option[String]("db1"), "t1")(sqlContext.sparkSession)
+    val tablePath = table.getTablePath
+    val storePath = CarbonProperties.getStorePath
+    assert(tablePath.equals(storePath + CarbonCommonConstants.FILE_SEPARATOR +
+      table.getDatabaseName + ".db" + CarbonCommonConstants.FILE_SEPARATOR +
+      table.getTableName))
+    sql("DROP TABLE IF EXISTS db1.t1")
+    sql("DROP DATABASE IF EXISTS db1")
+  }
+
+  test("test table is under storepath/dbname if storepath set" +
+    " different from spark.sql.warehouse.dir") {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.STORE_LOCATION,
+      warehouse + CarbonCommonConstants.FILE_SEPARATOR + "carbon.store")
+    sql("create table if not exists t1 (i int) stored as carbondata")
+    val table = CarbonEnv.getCarbonTable(None, "t1")(sqlContext.sparkSession)
+    val tablePath = table.getTablePath
+    val storePath = CarbonProperties.getStorePath
+    assert(tablePath.equals(storePath + CarbonCommonConstants.FILE_SEPARATOR +
+      table.getDatabaseName + CarbonCommonConstants.FILE_SEPARATOR +
+      table.getTableName))
+    sql("DROP TABLE IF EXISTS t1")
+    CarbonProperties.getInstance().removeProperty(CarbonCommonConstants.STORE_LOCATION)
+  }
 }
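
Note on how the CarbonEnv change works: fs.makeQualified resolves a scheme-less store path against the filesystem taken from the active Hadoop configuration, so later folder creation happens on that filesystem instead of silently falling back to the local one, and Path.getPathWithoutSchemeAndAuthority strips the scheme again before the value is written back to STORE_LOCATION. The standalone Scala sketch below is not part of the patch; it assumes only hadoop-common on the classpath, a hand-built Configuration in place of sparkSession.sparkContext.hadoopConfiguration, and an illustrative path.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

object StorePathQualificationSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for sparkSession.sparkContext.hadoopConfiguration.
    // On a real cluster fs.defaultFS would typically be an hdfs:// URI;
    // file:/// is used here so the sketch runs with hadoop-common alone.
    val hadoopConf = new Configuration()
    hadoopConf.set("fs.defaultFS", "file:///")

    // A store path configured without a scheme, as in CARBONDATA-4304.
    val storePath = new Path("/tmp/carbon.store")

    // Same steps as the CarbonEnv change: qualify the path against the
    // filesystem resolved from the configuration ...
    val fs = storePath.getFileSystem(hadoopConf)
    val qualifiedPath = fs.makeQualified(storePath)
    println(qualifiedPath) // file:/tmp/carbon.store (an hdfs:// URI when the default FS is HDFS)

    // ... and keep only the scheme-less form for the value that the patch
    // writes back into CarbonCommonConstants.STORE_LOCATION.
    println(Path.getPathWithoutSchemeAndAuthority(qualifiedPath)) // /tmp/carbon.store
  }
}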