@@ -0,0 +1,191 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.iotdb.db.it.schema;

import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.itbase.category.ClusterIT;
import org.apache.iotdb.itbase.category.LocalStandaloneIT;
import org.apache.iotdb.util.AbstractSchemaIT;

import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.Parameterized;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

@Category({LocalStandaloneIT.class, ClusterIT.class})
public class IoTDBAlterEncodingCompressorIT extends AbstractSchemaIT {

  public IoTDBAlterEncodingCompressorIT(SchemaTestMode schemaTestMode) {
    super(schemaTestMode);
  }

  @Parameterized.BeforeParam
  public static void before() throws Exception {
    setUpEnvironment();
    EnvFactory.getEnv().initClusterEnvironment();
  }

  @Parameterized.AfterParam
  public static void after() throws Exception {
    EnvFactory.getEnv().cleanClusterEnvironment();
    tearDownEnvironment();
  }

  @After
  public void tearDown() throws Exception {
    clearSchema();
  }

  @Test
  public void alterEncodingAndCompressorTest() throws Exception {
    if (schemaTestMode.equals(SchemaTestMode.PBTree)) {
      return;
    }
    try (final Connection connection = EnvFactory.getEnv().getConnection();
        final Statement statement = connection.createStatement()) {
      statement.execute("create timeSeries root.vehicle.wind.a int32");

      try {
        statement.execute("alter timeSeries root set STORAGE_PROPERTIES encoding=PLAIN");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals("701: The timeSeries shall not be root.", e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries root.nonExist.** set STORAGE_PROPERTIES encoding=PLAIN");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals(
            "508: Timeseries [root.nonExist.**] does not exist or is represented by device template",
            e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries if exists root.nonExist.** set STORAGE_PROPERTIES encoding=PLAIN");
      } catch (final SQLException e) {
        fail(
            "Alter encoding & compressor shall not fail when the timeSeries does not exist and IF EXISTS is set");
      }

      try {
        statement.execute(
            "alter timeSeries if exists root.vehicle.** set STORAGE_PROPERTIES encoding=aaa");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals("701: Unsupported encoding: AAA", e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries if exists root.vehicle.** set STORAGE_PROPERTIES compressor=aaa");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals("701: Unsupported compressor: AAA", e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries if exists root.vehicle.** set STORAGE_PROPERTIES falseKey=aaa");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals("701: property falsekey is unsupported yet.", e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries if exists root.vehicle.** set STORAGE_PROPERTIES encoding=DICTIONARY");
        fail();
      } catch (final SQLException e) {
        Assert.assertTrue(e.getMessage().contains("encoding DICTIONARY does not support INT32"));
      }

      statement.execute(
          "alter timeSeries root.** set STORAGE_PROPERTIES encoding=Plain, compressor=LZMA2");

      try (final ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES")) {
        while (resultSet.next()) {
          assertEquals("PLAIN", resultSet.getString(5));
          assertEquals("LZMA2", resultSet.getString(6));
        }
      }

      statement.execute("create user IoTDBUser '!@#$!dfdfzvd343'");
      statement.execute("grant write on root.vehicle.wind.a to user IoTDBUser");
      statement.execute("create timeSeries root.vehicle.wind.b int32");
    }

    try (final Connection connection =
            EnvFactory.getEnv().getConnection("IoTDBUser", "!@#$!dfdfzvd343");
        final Statement statement = connection.createStatement()) {
      try {
        statement.execute(
            "alter timeSeries root.vehicle.** set STORAGE_PROPERTIES encoding=PLAIN, compressor=LZMA2");
        fail();
      } catch (final SQLException e) {
        Assert.assertEquals(
            "803: No permissions for this operation, please add privilege WRITE_SCHEMA on [root.vehicle.**]",
            e.getMessage());
      }

      try {
        statement.execute(
            "alter timeSeries if permitted root.vehicle.** set STORAGE_PROPERTIES encoding=GORILLA, compressor=GZIP");
      } catch (final SQLException e) {
        fail("Alter encoding & compressor shall not fail without privileges when IF PERMITTED is set");
      }

      try {
        statement.execute(
            "alter timeSeries if permitted root.nonExist.** set STORAGE_PROPERTIES encoding=GORILLA, compressor=GZIP");
      } catch (final SQLException e) {
        fail("Alter encoding & compressor shall not fail if the intersected paths are empty");
      }
    }

    try (final Connection connection = EnvFactory.getEnv().getConnection();
        final Statement statement = connection.createStatement()) {
      try (final ResultSet resultSet =
          statement.executeQuery("SHOW TIMESERIES root.vehicle.wind.b")) {
        resultSet.next();
        assertEquals("TS_2DIFF", resultSet.getString(5));
        assertEquals("LZ4", resultSet.getString(6));
      }

      try (final ResultSet resultSet =
          statement.executeQuery("SHOW TIMESERIES root.vehicle.wind.a")) {
        resultSet.next();
        assertEquals("GORILLA", resultSet.getString(5));
        assertEquals("GZIP", resultSet.getString(6));
      }
    }
  }
}
@@ -79,9 +79,9 @@ public void testPureSchemaInclusion() throws Exception {
Arrays.asList(
// TODO: add database creation after the database auto creating on receiver can be
// banned
"create timeseries root.ln.wf01.wt01.status with datatype=BOOLEAN,encoding=PLAIN",
"ALTER timeseries root.ln.wf01.wt01.status ADD TAGS tag3=v3",
"ALTER timeseries root.ln.wf01.wt01.status ADD ATTRIBUTES attr4=v4"))) {
"create timeSeries root.ln.wf01.wt01.status with datatype=BOOLEAN,encoding=PLAIN",
"ALTER timeSeries root.ln.wf01.wt01.status ADD TAGS tag3=v3",
"ALTER timeSeries root.ln.wf01.wt01.status ADD ATTRIBUTES attr4=v4"))) {
return;
}

@@ -95,10 +95,19 @@ public void testPureSchemaInclusion() throws Exception {
if (!TestUtils.tryExecuteNonQueriesWithRetry(
senderEnv,
Arrays.asList(
"insert into root.ln.wf01.wt01(time, status) values(now(), false)", "flush"))) {
"ALTER timeSeries root.** set STORAGE_PROPERTIES compressor=ZSTD",
"insert into root.ln.wf01.wt01(time, status) values(now(), false)",
"flush"))) {
return;
}

+ TestUtils.assertDataEventuallyOnEnv(
+ receiverEnv,
+ "show timeseries",
+ "Timeseries,Alias,Database,DataType,Encoding,Compression,Tags,Attributes,Deadband,DeadbandParameters,ViewType,",
+ Collections.singleton(
+ "root.ln.wf01.wt01.status,null,root.ln,BOOLEAN,PLAIN,ZSTD,{\"tag3\":\"v3\"},{\"attr4\":\"v4\"},null,null,BASE,"));

TestUtils.assertDataAlwaysOnEnv(
receiverEnv, "select * from root.**", "Time,", Collections.emptySet());
}
@@ -39,7 +39,7 @@ ddlStatement
// Database
: createDatabase | dropDatabase | dropPartition | alterDatabase | showDatabases | countDatabases
// Timeseries & Path
- | createTimeseries | dropTimeseries | alterTimeseries
+ | createTimeseries | dropTimeseries | alterTimeseries | alterEncodingCompressor
| showDevices | showTimeseries | showChildPaths | showChildNodes | countDevices | countTimeseries | countNodes
// Device Template
| createSchemaTemplate | createTimeseriesUsingSchemaTemplate | dropSchemaTemplate | dropTimeseriesOfSchemaTemplate
@@ -176,6 +176,10 @@ alterClause
| UPSERT aliasClause? tagClause? attributeClause?
;

alterEncodingCompressor
: ALTER TIMESERIES (IF EXISTS)? (IF PERMITTED)? prefixPath (COMMA prefixPath)* SET STORAGE_PROPERTIES attributePair (COMMA attributePair)*
;
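// For reference, a sketch of statements this rule is intended to accept, mirroring the SQL
// exercised in the new IoTDBAlterEncodingCompressorIT above (only the encoding and compressor
// keys are accepted there; other keys are rejected):
//   ALTER TIMESERIES root.** SET STORAGE_PROPERTIES encoding=PLAIN, compressor=LZMA2
//   ALTER TIMESERIES IF EXISTS root.nonExist.** SET STORAGE_PROPERTIES encoding=PLAIN
//   ALTER TIMESERIES IF PERMITTED root.vehicle.** SET STORAGE_PROPERTIES encoding=GORILLA, compressor=GZIP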

aliasClause
: ALIAS operator_eq alias
;
@@ -566,6 +566,10 @@ PATHS
: P A T H S
;

PERMITTED
: P E R M I T T E D
;

PIPE
: P I P E
;
@@ -790,6 +794,10 @@ STOP
: S T O P
;

STORAGE_PROPERTIES
: S T O R A G E '_' P R O P E R T I E S
;

SUBSCRIPTION
: S U B S C R I P T I O N
;
@@ -89,6 +89,8 @@ public enum CnToDnAsyncRequestType {
DELETE_DATA_FOR_DELETE_SCHEMA,
DELETE_TIMESERIES,

ALTER_ENCODING_COMPRESSOR,

CONSTRUCT_SCHEMA_BLACK_LIST_WITH_TEMPLATE,
ROLLBACK_SCHEMA_BLACK_LIST_WITH_TEMPLATE,
DEACTIVATE_TEMPLATE,
@@ -47,6 +47,7 @@
import org.apache.iotdb.confignode.client.async.handlers.rpc.subscription.ConsumerGroupPushMetaRPCHandler;
import org.apache.iotdb.confignode.client.async.handlers.rpc.subscription.TopicPushMetaRPCHandler;
import org.apache.iotdb.mpp.rpc.thrift.TActiveTriggerInstanceReq;
import org.apache.iotdb.mpp.rpc.thrift.TAlterEncodingCompressorReq;
import org.apache.iotdb.mpp.rpc.thrift.TAlterViewReq;
import org.apache.iotdb.mpp.rpc.thrift.TCheckSchemaRegionUsingTemplateReq;
import org.apache.iotdb.mpp.rpc.thrift.TCheckTimeSeriesExistenceReq;
@@ -297,6 +298,11 @@ protected void initActionMapBuilder() {
CnToDnAsyncRequestType.DELETE_TIMESERIES,
(req, client, handler) ->
client.deleteTimeSeries((TDeleteTimeSeriesReq) req, (SchemaUpdateRPCHandler) handler));
actionMapBuilder.put(
CnToDnAsyncRequestType.ALTER_ENCODING_COMPRESSOR,
(req, client, handler) ->
client.alterEncodingCompressor(
(TAlterEncodingCompressorReq) req, (SchemaUpdateRPCHandler) handler));
actionMapBuilder.put(
CnToDnAsyncRequestType.CONSTRUCT_SCHEMA_BLACK_LIST_WITH_TEMPLATE,
(req, client, handler) ->
@@ -82,6 +82,7 @@ public static DataNodeAsyncRequestRPCHandler<?> buildHandler(
case ROLLBACK_SCHEMA_BLACK_LIST:
case DELETE_DATA_FOR_DELETE_SCHEMA:
case DELETE_TIMESERIES:
case ALTER_ENCODING_COMPRESSOR:
case CONSTRUCT_SCHEMA_BLACK_LIST_WITH_TEMPLATE:
case ROLLBACK_SCHEMA_BLACK_LIST_WITH_TEMPLATE:
case DEACTIVATE_TEMPLATE:
@@ -59,6 +59,7 @@
import org.apache.iotdb.confignode.consensus.request.write.partition.CreateSchemaPartitionPlan;
import org.apache.iotdb.confignode.consensus.request.write.partition.RemoveRegionLocationPlan;
import org.apache.iotdb.confignode.consensus.request.write.partition.UpdateRegionLocationPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeAlterEncodingCompressorPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeactivateTemplatePlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeleteLogicalViewPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeleteTimeSeriesPlan;
@@ -408,6 +409,9 @@ public static ConfigPhysicalPlan create(final ByteBuffer buffer) throws IOExcept
case PipeDeactivateTemplate:
plan = new PipeDeactivateTemplatePlan();
break;
case PipeAlterEncodingCompressor:
plan = new PipeAlterEncodingCompressorPlan();
break;
case UpdateTriggersOnTransferNodes:
plan = new UpdateTriggersOnTransferNodesPlan();
break;
@@ -229,6 +229,7 @@ public enum ConfigPhysicalPlanType {
PipeDeleteLogicalView((short) 1703),
PipeDeactivateTemplate((short) 1704),
PipeSetTTL((short) 1705),
PipeAlterEncodingCompressor((short) 1708),

/** Subscription */
CreateTopic((short) 1800),
@@ -23,6 +23,7 @@
import org.apache.iotdb.confignode.consensus.request.write.database.DatabaseSchemaPlan;
import org.apache.iotdb.confignode.consensus.request.write.database.DeleteDatabasePlan;
import org.apache.iotdb.confignode.consensus.request.write.database.SetTTLPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeAlterEncodingCompressorPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeactivateTemplatePlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeleteLogicalViewPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.payload.PipeDeleteTimeSeriesPlan;
@@ -83,6 +84,8 @@ public R process(ConfigPhysicalPlan plan, C context) {
return visitRevokeRoleFromUser((AuthorPlan) plan, context);
case SetTTL:
return visitTTL((SetTTLPlan) plan, context);
case PipeAlterEncodingCompressor:
return visitPipeAlterEncodingCompressor((PipeAlterEncodingCompressorPlan) plan, context);
default:
return visitPlan(plan, context);
}
Expand Down Expand Up @@ -190,4 +193,9 @@ public R visitRevokeRoleFromUser(AuthorPlan revokeRoleFromUserPlan, C context) {
public R visitTTL(SetTTLPlan setTTLPlan, C context) {
return visitPlan(setTTLPlan, context);
}

public R visitPipeAlterEncodingCompressor(
final PipeAlterEncodingCompressorPlan pipeAlterEncodingCompressorPlan, final C context) {
return visitPlan(pipeAlterEncodingCompressorPlan, context);
}
}