|
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalog

import java.io.IOException

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable}

import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.fileoperations.FileWriteOperation
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
import org.apache.carbondata.core.metadata.schema.table.TableInfo
import org.apache.carbondata.core.util.CarbonUtil
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.core.writer.ThriftWriter
import org.apache.carbondata.format.{TableInfo => ExternalTableInfo}

/**
 * Catalog operations backed by Spark's session catalog and the schema files
 * stored in the carbon store.
 */
private[catalog] object CarbonCatalogImpl extends CarbonCatalog {

  def createTable(
      tableDefinition: CatalogTable,
      ignoreIfExists: Boolean,
      validateLocation: Boolean = true)(sparkSession: SparkSession): Unit = {
    sparkSession.sessionState.catalog
      .createTable(tableDefinition, ignoreIfExists, validateLocation)
  }

  def tableExists(name: TableIdentifier)(sparkSession: SparkSession): Boolean = {
    sparkSession.sessionState.catalog.tableExists(name)
  }

  override def getDatabaseMetadata(name: String)(sparkSession: SparkSession): CatalogDatabase = {
    sparkSession.sessionState.catalog.getDatabaseMetadata(name)
  }

  override def listDatabases()(sparkSession: SparkSession): Seq[String] = {
    sparkSession.sessionState.catalog.listDatabases()
  }

  override def listTables(dbName: String)(sparkSession: SparkSession): Seq[TableIdentifier] = {
    sparkSession.sessionState.catalog.listTables(dbName)
  }

  override def getTableMetadata(tableIdentifier: TableIdentifier)
      (sparkSession: SparkSession): CatalogTable = {
    sparkSession.sessionState.catalog.getTableMetadata(tableIdentifier)
  }

  override def dropTable(name: TableIdentifier, ignoreIfNotExists: Boolean,
      purge: Boolean)(sparkSession: SparkSession): Unit = {
    // Pass the caller's flags through instead of hardcoding them; otherwise a
    // missing table is silently ignored and purge requests are dropped.
    sparkSession.sessionState.catalog.dropTable(name, ignoreIfNotExists, purge)
  }

  /**
   * Reads the schema thrift file under `tablePath` and converts it to the
   * wrapper representation; returns None if the schema file does not exist.
   */
  override def getSchema(dbName: String, tableName: String,
      tablePath: String): Option[TableInfo] = {
    val schemaConverter = new ThriftWrapperSchemaConverterImpl
    val tableMetadataFile = CarbonTablePath.getSchemaFilePath(tablePath)
    if (FileFactory.isFileExist(tableMetadataFile)) {
      val tableInfo: ExternalTableInfo = CarbonUtil.readSchemaFile(tableMetadataFile)
      val wrapperTableInfo =
        schemaConverter.fromExternalToWrapperTableInfo(tableInfo, dbName, tableName, tablePath)
      Some(wrapperTableInfo)
    } else {
      None
    }
  }

  /** Returns the schema file's last modified time, or -1 if the file does not exist. */
  override def getLastSchemaModificationTime(schemaFilePath: String): Long = {
    val schemaFile = FileFactory.getCarbonFile(schemaFilePath)
    if (schemaFile.exists()) {
      schemaFile.getLastModifiedTime
    } else {
      -1L
    }
  }

  /**
   * Writes the schema thrift file to the carbon store and returns the file's
   * new last modified time. Requires an active SparkSession for the Hadoop
   * configuration (see the usage sketch after this object).
   */
  def saveSchema(identifier: AbsoluteTableIdentifier, thriftTableInfo: ExternalTableInfo): Long = {
    val schemaMetadataPath = CarbonTablePath.getMetadataPath(identifier.getTablePath)
    if (!FileFactory.isFileExist(schemaMetadataPath)) {
      val isDirCreated = FileFactory.mkdirs(schemaMetadataPath,
        SparkSession.getActiveSession.get.sessionState.newHadoopConf())
      if (!isDirCreated) {
        throw new IOException(s"Failed to create the metadata directory $schemaMetadataPath")
      }
    }
    val schemaFilePath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath)
    val thriftWriter = new ThriftWriter(schemaFilePath, false)
    thriftWriter.open(FileWriteOperation.OVERWRITE)
    thriftWriter.write(thriftTableInfo)
    thriftWriter.close()
    val modifiedTime = System.currentTimeMillis()
    FileFactory.getCarbonFile(schemaFilePath).setLastModifiedTime(modifiedTime)
    modifiedTime
  }

  // Kept for reference: pending alterTable implementation that would push the
  // updated schema into the Hive metastore as serde properties. If enabled, it
  // would also need an import for HiveExternalCatalog.
  // def alterTable(wrapperTableInfo: TableInfo)(sparkSession: SparkSession): Unit = {
  //   val schemaParts = CarbonUtil.convertToMultiGsonStrings(wrapperTableInfo, "=", "'", "")
  //   val hiveClient = sparkSession
  //     .sessionState
  //     .catalog
  //     .externalCatalog.asInstanceOf[HiveExternalCatalog]
  //     .client
  //   hiveClient.runSqlHive(
  //     s"ALTER TABLE `${wrapperTableInfo.getDatabaseName}`" +
  //     s".`${wrapperTableInfo.getFactTable.getTableName}` " +
  //     s"SET SERDEPROPERTIES($schemaParts)")
  // }
}
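
// Illustrative usage sketch, not part of the original patch: round-trips a
// table schema through getSchema and saveSchema above. The database name,
// table name, and table path are hypothetical placeholders, and the helpers
// AbsoluteTableIdentifier.from(tablePath, dbName, tableName) and
// ThriftWrapperSchemaConverterImpl.fromWrapperToExternalTableInfo are assumed
// to be available from carbondata-core. An active SparkSession is required,
// since saveSchema reads the active session's Hadoop configuration.
private[catalog] object CarbonCatalogUsageSketch {

  def rewriteSchema(): Unit = {
    val dbName = "default"                                   // hypothetical
    val tableName = "sample_table"                           // hypothetical
    val tablePath = "/tmp/carbonstore/default/sample_table"  // hypothetical

    // Read the schema file, if present, into the wrapper representation.
    CarbonCatalogImpl.getSchema(dbName, tableName, tablePath).foreach { wrapperTableInfo =>
      // Convert back to the thrift (external) representation.
      val converter = new ThriftWrapperSchemaConverterImpl
      val thriftTableInfo =
        converter.fromWrapperToExternalTableInfo(wrapperTableInfo, dbName, tableName)
      // Rewrite the schema file and capture its new modification time, which
      // getLastSchemaModificationTime should now reflect.
      val identifier = AbsoluteTableIdentifier.from(tablePath, dbName, tableName)
      val modifiedTime = CarbonCatalogImpl.saveSchema(identifier, thriftTableInfo)
      println(s"Schema for $dbName.$tableName rewritten at $modifiedTime")
    }
  }
}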