PHOENIX-7481 HBase 3 compatibility changes: Cleanup deprecated APIs, … #2038

Open · wants to merge 3 commits into master
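The patch removes HBase 2 client APIs that HBase 3 drops outright: HTableDescriptor, CompareFilter.CompareOp, the checkAndPut/checkAndDelete/checkAndMutate overloads built on them, and the concrete HTable class. For orientation, a minimal sketch of the replacement idiom, assuming an open Table handle; the helper class, method name, and parameters are illustrative, not code from this PR:

import java.io.IOException;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.CheckAndMutateResult;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

class CheckAndPutMigration {
    // Old, removed in HBase 3:
    //   table.checkAndPut(row, fam, qual, CompareFilter.CompareOp.EQUAL, expected, put);
    // New form, available in both HBase 2.4+ and 3:
    static boolean putIfMatches(Table table, byte[] row, byte[] fam, byte[] qual,
            byte[] expected, Put put) throws IOException {
        CheckAndMutateResult result = table.checkAndMutate(
            CheckAndMutate.newBuilder(row)
                .ifMatches(fam, qual, CompareOperator.EQUAL, expected)
                .build(put));
        return result.isSuccess();
    }
}

Because the CheckAndMutate value object exists on both HBase lines, the delegate below can keep a single checkAndMutate(CheckAndMutate) pass-through while every CompareOp-based overload disappears.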
@@ -23,8 +23,6 @@
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.CheckAndMutate;
@@ -38,13 +36,11 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.phoenix.compat.hbase.CompatDelegateHTable;

@@ -69,21 +65,11 @@ public Configuration getConfiguration() {
return delegate.getConfiguration();
}

@Override
public HTableDescriptor getTableDescriptor() throws IOException {
return delegate.getTableDescriptor();
}

@Override
public boolean exists(Get get) throws IOException {
return delegate.exists(get);
}

@Override
public boolean[] existsAll(List<Get> gets) throws IOException {
return delegate.existsAll(gets);
}

@Override
public void batch(List<? extends Row> actions, Object[] results) throws IOException,
InterruptedException {
@@ -131,18 +117,6 @@ public void put(List<Put> puts) throws IOException {
delegate.put(puts);
}

@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put)
throws IOException {
return delegate.checkAndPut(row, family, qualifier, value, put);
}

@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
byte[] value, Put put) throws IOException {
return delegate.checkAndPut(row, family, qualifier, compareOp, value, put);
}

@Override
public void delete(Delete delete) throws IOException {
delegate.delete(delete);
@@ -153,18 +127,6 @@ public void delete(List<Delete> deletes) throws IOException {
delegate.delete(deletes);
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value,
Delete delete) throws IOException {
return delegate.checkAndDelete(row, family, qualifier, value, delete);
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
byte[] value, Delete delete) throws IOException {
return delegate.checkAndDelete(row, family, qualifier, compareOp, value, delete);
}

@Override
public Result append(Append append) throws IOException {
return delegate.append(append);
@@ -230,80 +192,11 @@ public CheckAndMutateResult checkAndMutate(CheckAndMutate checkAndMutate) throws
return delegate.checkAndMutate(checkAndMutate);
}

@Override
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
byte[] value, RowMutations mutation) throws IOException {
return delegate.checkAndMutate(row, family, qualifier, compareOp, value, mutation);
}

@Override
public void setOperationTimeout(int operationTimeout) {
delegate.setOperationTimeout(operationTimeout);
}

@Override
public int getOperationTimeout() {
return delegate.getOperationTimeout();
}

@Override
public int getRpcTimeout() {
return delegate.getRpcTimeout();
}

@Override
public void setRpcTimeout(int rpcTimeout) {
delegate.setRpcTimeout(rpcTimeout);
}

@Override
public TableDescriptor getDescriptor() throws IOException {
return delegate.getDescriptor();
}

@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
byte[] value, Put put) throws IOException {
return delegate.checkAndPut(row, family, qualifier, op, value, put);
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
byte[] value, Delete delete) throws IOException {
return delegate.checkAndDelete(row, family, qualifier, op, value, delete);
}

@Override
public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
return delegate.checkAndMutate(row, family);
}

@Override
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
byte[] value, RowMutations mutation) throws IOException {
return delegate.checkAndMutate(row, family, qualifier, op, value, mutation);
}

@Override
public int getReadRpcTimeout() {
return delegate.getReadRpcTimeout();
}

@Override
public void setReadRpcTimeout(int readRpcTimeout) {
delegate.setReadRpcTimeout(readRpcTimeout);
}

@Override
public int getWriteRpcTimeout() {
return delegate.getWriteRpcTimeout();
}

@Override
public void setWriteRpcTimeout(int writeRpcTimeout) {
delegate.setWriteRpcTimeout(writeRpcTimeout);
}

@Override
public boolean[] exists(List<Get> gets) throws IOException {
return delegate.exists(gets);
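Note that the timeout setters (setOperationTimeout, setRpcTimeout, and the read/write variants) leave DelegateHTable entirely: HBase 3's Table has no mutable timeouts. A caller that relied on them would instead fix the timeouts when the handle is built. A sketch assuming an open Connection; the table name and values are illustrative:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;

class TimeoutMigration {
    static boolean[] existsWithTimeouts(Connection conn, List<Get> gets)
            throws IOException {
        // Replaces the removed table.setOperationTimeout(...) / setRpcTimeout(...):
        // timeouts are fixed once, at construction, via the TableBuilder.
        Table table = conn.getTableBuilder(TableName.valueOf("MY_TABLE"), null)
            .setOperationTimeout(60_000)
            .setRpcTimeout(30_000)
            .build();
        try {
            // existsAll(gets) is also gone; exists(List<Get>) is the survivor.
            return table.exists(gets);
        } finally {
            table.close();
        }
    }
}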
@@ -26,7 +26,6 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@@ -65,7 +64,6 @@ public class OmidTransactionTable extends CompatOmidTransactionTable implements
private final boolean addShadowCells;

public OmidTransactionTable() throws SQLException {
super(null);
this.tTable = null;
this.tx = null;
this.addShadowCells = false;
@@ -80,7 +78,6 @@ public OmidTransactionTable(PhoenixTransactionContext ctx, Table hTable, boolean
}

public OmidTransactionTable(PhoenixTransactionContext ctx, Table hTable, boolean isConflictFree, boolean addShadowCells) throws SQLException {
super(hTable);
assert(ctx instanceof OmidTransactionContext);

OmidTransactionContext omidTransactionContext = (OmidTransactionContext) ctx;
@@ -165,7 +162,7 @@ public TableName getName() {
return TableName.valueOf(name);
}

@Override
//No @Override for HBase 3 compatibility
public boolean[] existsAll(List<Get> gets) throws IOException {
throw new UnsupportedOperationException();
}
@@ -186,13 +183,13 @@ public <R> void batchCallback(List<? extends Row> actions,
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put) throws IOException {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Delete delete) throws IOException {
throw new UnsupportedOperationException();
@@ -256,42 +253,42 @@ public <R extends Message> void batchCoprocessorService(
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public int getOperationTimeout() {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public int getRpcTimeout() {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public void setOperationTimeout(int arg0) {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public void setRpcTimeout(int arg0) {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public int getWriteRpcTimeout() {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public void setWriteRpcTimeout(int writeRpcTimeout) {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public int getReadRpcTimeout() {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public void setReadRpcTimeout(int readRpcTimeout) {
throw new UnsupportedOperationException();
}
@@ -306,19 +303,19 @@ public boolean[] exists(List<Get> gets) throws IOException {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value, Put put)
throws IOException {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value,
Delete delete) throws IOException {
throw new UnsupportedOperationException();
}

@Override
//No @Override for HBase 3 compatibility
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value,
RowMutations mutation) throws IOException {
throw new UnsupportedOperationException();
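The repeated //No @Override edits in this file are the crux of the single-source-tree approach: the methods must stay so the class still satisfies HBase 2's Table interface, but the annotation has to go, because javac rejects @Override on a method the HBase 3 interface no longer declares. A toy illustration of the mechanism, with hypothetical interface names (not HBase code):

// Stand-in for HBase's Table: the "v2" jar declares checkAndPut,
// the "v3" jar does not. Delete the method below to simulate v3.
interface TableApi {
    boolean checkAndPut(byte[] row);
}

class Shim implements TableApi {
    // No @Override: when TableApi declares checkAndPut, this method is an
    // override; when it does not, it is just an extra public method.
    // With @Override, compiling against the "v3" TableApi would fail.
    public boolean checkAndPut(byte[] row) {
        throw new UnsupportedOperationException();
    }
}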
@@ -40,6 +40,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -53,7 +54,6 @@
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
@@ -307,7 +307,7 @@ public void run() {
Scan scan = new Scan();
SingleColumnValueFilter filter = new SingleColumnValueFilter(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES,
CompareFilter.CompareOp.NOT_EQUAL, PLong.INSTANCE.toBytes(0L));
CompareOperator.NOT_EQUAL, PLong.INSTANCE.toBytes(0L));
filter.setFilterIfMissing(true);
scan.setFilter(filter);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
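The filter fix above is mechanical: SingleColumnValueFilter already offers an overload taking CompareOperator, so the deprecated CompareFilter.CompareOp enum swaps out one-for-one. The same pattern in isolation, with the family and qualifier passed in rather than the Phoenix metadata constants:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

class FilterMigration {
    static Scan indexDisabledScan(byte[] family, byte[] qualifier) {
        // Was: new SingleColumnValueFilter(family, qualifier,
        //          CompareFilter.CompareOp.NOT_EQUAL, Bytes.toBytes(0L));
        SingleColumnValueFilter filter = new SingleColumnValueFilter(
            family, qualifier, CompareOperator.NOT_EQUAL, Bytes.toBytes(0L));
        filter.setFilterIfMissing(true); // drop rows that lack the column
        return new Scan().setFilter(filter);
    }
}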
@@ -40,7 +40,6 @@
import java.util.Properties;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -1013,7 +1012,7 @@ public void testMakeBaseTableTransactional() throws Exception {

PName tenantId = isMultiTenant ? PNameFactory.newName(TENANT1) : null;
PhoenixConnection phoenixConn = conn.unwrap(PhoenixConnection.class);
Table htable = phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
Table table = phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
assertFalse(phoenixConn.getTable(new PTableKey(null, baseTableName)).isTransactional());
assertFalse(viewConn.unwrap(PhoenixConnection.class).getTable(new PTableKey(tenantId, viewOfTable)).isTransactional());
}
@@ -1196,7 +1195,7 @@ public void testDroppingIndexedColDropsViewIndex() throws Exception {

// scan the physical table and verify there is a single row for the second local index
Scan scan = new Scan();
HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
Table table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
ResultScanner results = table.getScanner(scan);
Result result = results.next();
assertNotNull(result);
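The test edits here and in the remaining files are all the same one-liner: HTable is no longer public API in HBase 3, and getTable(...) already returns the Table interface, so the downcast was never needed. A sketch assuming a Phoenix ConnectionQueryServices handle; the helper name is illustrative:

import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.phoenix.query.ConnectionQueryServices;

class CastRemoval {
    static ResultScanner scanAll(ConnectionQueryServices services, byte[] name)
            throws Exception {
        // Was: HTable table = (HTable) services.getTable(name);
        Table table = services.getTable(name); // the interface type suffices
        return table.getScanner(new Scan());
    }
}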
@@ -18,7 +18,6 @@

package org.apache.phoenix.end2end;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.phoenix.compile.QueryPlan;
@@ -48,7 +48,6 @@
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -914,7 +913,7 @@ public void testUpsertAfterIndexDrop() throws Exception {
conn.commit();

// the index table is one row
HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
Table table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
ResultScanner resultScanner = table.getScanner(new Scan());
for (Result result : resultScanner) {
System.out.println(result);
@@ -22,7 +22,6 @@
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -1028,7 +1027,7 @@ public void testUpsertAfterIndexDrop() throws Exception {
conn.commit();

// the index table is one row
HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices()
Table table = conn.unwrap(PhoenixConnection.class).getQueryServices()
.getTable(fullTableName.getBytes());
ResultScanner resultScanner = table.getScanner(new Scan());
for (Result result : resultScanner) {