Commit 6f71800

szucsvillo and stoty committed
PHOENIX-7481 HBase 3 compatibility changes: Cleanup deprecated APIs, HTable and HTableDescriptor
Co-authored-by: Istvan Toth <[email protected]>
1 parent 095617f commit 6f71800
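
For orientation, the replacements used throughout this patch are the ones the current HBase client API already provides: the Table interface instead of the HTable class, TableDescriptor / getDescriptor() instead of HTableDescriptor / getTableDescriptor(), and CompareOperator instead of the deprecated CompareFilter.CompareOp enum. A minimal sketch of those target API shapes follows; the connection setup, the table name "EXAMPLE", and the column names are illustrative only and not taken from the patch.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class Hbase3CompatSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
                // Table (interface) replaces the removed HTable class
                Table table = connection.getTable(TableName.valueOf("EXAMPLE"))) {
            // TableDescriptor/getDescriptor() replace HTableDescriptor/getTableDescriptor()
            TableDescriptor descriptor = table.getDescriptor();
            System.out.println(descriptor.getTableName());

            // CompareOperator replaces the deprecated CompareFilter.CompareOp enum
            Scan scan = new Scan();
            scan.setFilter(new SingleColumnValueFilter(Bytes.toBytes("0"), Bytes.toBytes("COL"),
                    CompareOperator.NOT_EQUAL, Bytes.toBytes(0L)));
            table.getScanner(scan).close();
        }
    }
}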

File tree: 16 files changed (+467, -158 lines)

phoenix-core-client/src/main/java/org/apache/phoenix/execute/DelegateHTable.java (-105)

@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.CheckAndMutate;
@@ -44,7 +43,6 @@
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.phoenix.compat.hbase.CompatDelegateHTable;
 
@@ -69,21 +67,11 @@ public Configuration getConfiguration() {
         return delegate.getConfiguration();
     }
 
-    @Override
-    public HTableDescriptor getTableDescriptor() throws IOException {
-        return delegate.getTableDescriptor();
-    }
-
     @Override
     public boolean exists(Get get) throws IOException {
         return delegate.exists(get);
     }
 
-    @Override
-    public boolean[] existsAll(List<Get> gets) throws IOException {
-        return delegate.existsAll(gets);
-    }
-
     @Override
     public void batch(List<? extends Row> actions, Object[] results) throws IOException,
             InterruptedException {
@@ -131,18 +119,6 @@ public void put(List<Put> puts) throws IOException {
         delegate.put(puts);
     }
 
-    @Override
-    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put)
-            throws IOException {
-        return delegate.checkAndPut(row, family, qualifier, value, put);
-    }
-
-    @Override
-    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
-            byte[] value, Put put) throws IOException {
-        return delegate.checkAndPut(row, family, qualifier, compareOp, value, put);
-    }
-
     @Override
     public void delete(Delete delete) throws IOException {
         delegate.delete(delete);
@@ -153,18 +129,6 @@ public void delete(List<Delete> deletes) throws IOException {
         delegate.delete(deletes);
     }
 
-    @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value,
-            Delete delete) throws IOException {
-        return delegate.checkAndDelete(row, family, qualifier, value, delete);
-    }
-
-    @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
-            byte[] value, Delete delete) throws IOException {
-        return delegate.checkAndDelete(row, family, qualifier, compareOp, value, delete);
-    }
-
     @Override
     public Result append(Append append) throws IOException {
         return delegate.append(append);
@@ -230,80 +194,11 @@ public CheckAndMutateResult checkAndMutate(CheckAndMutate checkAndMutate) throws
         return delegate.checkAndMutate(checkAndMutate);
     }
 
-    @Override
-    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
-            byte[] value, RowMutations mutation) throws IOException {
-        return delegate.checkAndMutate(row, family, qualifier, compareOp, value, mutation);
-    }
-
-    @Override
-    public void setOperationTimeout(int operationTimeout) {
-        delegate.setOperationTimeout(operationTimeout);
-    }
-
-    @Override
-    public int getOperationTimeout() {
-        return delegate.getOperationTimeout();
-    }
-
-    @Override
-    public int getRpcTimeout() {
-        return delegate.getRpcTimeout();
-    }
-
-    @Override
-    public void setRpcTimeout(int rpcTimeout) {
-        delegate.setRpcTimeout(rpcTimeout);
-    }
-
     @Override
     public TableDescriptor getDescriptor() throws IOException {
         return delegate.getDescriptor();
     }
 
-    @Override
-    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
-            byte[] value, Put put) throws IOException {
-        return delegate.checkAndPut(row, family, qualifier, op, value, put);
-    }
-
-    @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
-            byte[] value, Delete delete) throws IOException {
-        return delegate.checkAndDelete(row, family, qualifier, op, value, delete);
-    }
-
-    @Override
-    public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
-        return delegate.checkAndMutate(row, family);
-    }
-
-    @Override
-    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
-            byte[] value, RowMutations mutation) throws IOException {
-        return delegate.checkAndMutate(row, family, qualifier, op, value, mutation);
-    }
-
-    @Override
-    public int getReadRpcTimeout() {
-        return delegate.getReadRpcTimeout();
-    }
-
-    @Override
-    public void setReadRpcTimeout(int readRpcTimeout) {
-        delegate.setReadRpcTimeout(readRpcTimeout);
-    }
-
-    @Override
-    public int getWriteRpcTimeout() {
-        return delegate.getWriteRpcTimeout();
-    }
-
-    @Override
-    public void setWriteRpcTimeout(int writeRpcTimeout) {
-        delegate.setWriteRpcTimeout(writeRpcTimeout);
-    }
-
     @Override
     public boolean[] exists(List<Get> gets) throws IOException {
         return delegate.exists(gets);
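
The checkAndPut / checkAndDelete / checkAndMutate overloads deleted above are the old HBase-1-style conditional mutations; the context line in the last hunk shows that DelegateHTable keeps delegating the checkAndMutate(CheckAndMutate) overload, which is available from HBase 2.4 onward. A minimal sketch of expressing the old checkAndPut condition through that kept API; the helper name and arguments below are made up for illustration and are not part of the patch.

import java.io.IOException;

import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.CheckAndMutateResult;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class CheckAndMutateSketch {
    // Same condition as the removed checkAndPut(row, family, qualifier, value, put):
    // apply the Put only when the current value of family:qualifier equals 'expected'.
    static boolean putIfEquals(Table table, byte[] row, byte[] family, byte[] qualifier,
            byte[] expected, Put put) throws IOException {
        CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row)
                .ifEquals(family, qualifier, expected)
                .build(put);
        CheckAndMutateResult result = table.checkAndMutate(checkAndMutate);
        return result.isSuccess();
    }
}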

phoenix-core-client/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java (+14, -15)

@@ -26,7 +26,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -165,7 +164,7 @@ public TableName getName() {
         return TableName.valueOf(name);
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean[] existsAll(List<Get> gets) throws IOException {
         throw new UnsupportedOperationException();
     }
@@ -186,13 +185,13 @@ public <R> void batchCallback(List<? extends Row> actions,
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
            byte[] value, Put put) throws IOException {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
            byte[] value, Delete delete) throws IOException {
         throw new UnsupportedOperationException();
@@ -256,42 +255,42 @@ public <R extends Message> void batchCoprocessorService(
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public int getOperationTimeout() {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public int getRpcTimeout() {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public void setOperationTimeout(int arg0) {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public void setRpcTimeout(int arg0) {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public int getWriteRpcTimeout() {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public void setWriteRpcTimeout(int writeRpcTimeout) {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
    public int getReadRpcTimeout() {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public void setReadRpcTimeout(int readRpcTimeout) {
         throw new UnsupportedOperationException();
     }
@@ -306,19 +305,19 @@ public boolean[] exists(List<Get> gets) throws IOException {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value, Put put)
            throws IOException {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value,
            Delete delete) throws IOException {
         throw new UnsupportedOperationException();
     }
 
-    @Override
+    //No @Override for HBase 3 compatibility
     public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value,
            RowMutations mutation) throws IOException {
         throw new UnsupportedOperationException();
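
The repeated "//No @Override for HBase 3 compatibility" comment marks methods that exist on the HBase 2 Table interface but not on the HBase 3 one: with @Override the class would no longer compile against an HBase 3 client, while a plain method compiles against both lines and stays an unsupported-operation stub. A contrived illustration of that constraint, using made-up interface names standing in for the two Table versions rather than the real HBase types:

// Hypothetical stand-ins for the HBase 2 and HBase 3 Table interfaces.
interface TableV2 {
    int getRpcTimeout();
}

interface TableV3 {
    // getRpcTimeout() no longer declared here
}

class CompatStub implements TableV3 {
    // With @Override this would not compile against TableV3, because there is
    // no inherited method to override; as a plain method it compiles whether
    // the implemented interface still declares getRpcTimeout() or not.
    public int getRpcTimeout() {
        throw new UnsupportedOperationException();
    }
}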

phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java (+2, -1)

@@ -40,6 +40,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -307,7 +308,7 @@ public void run() {
             Scan scan = new Scan();
             SingleColumnValueFilter filter = new SingleColumnValueFilter(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
                 PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES,
-                CompareFilter.CompareOp.NOT_EQUAL, PLong.INSTANCE.toBytes(0L));
+                CompareOperator.NOT_EQUAL, PLong.INSTANCE.toBytes(0L));
             filter.setFilterIfMissing(true);
             scan.setFilter(filter);
             scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,

phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java (+2, -3)

@@ -40,7 +40,6 @@
 import java.util.Properties;
 
 import org.apache.commons.lang3.ArrayUtils;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -1013,7 +1012,7 @@ public void testMakeBaseTableTransactional() throws Exception {
 
             PName tenantId = isMultiTenant ? PNameFactory.newName(TENANT1) : null;
             PhoenixConnection phoenixConn = conn.unwrap(PhoenixConnection.class);
-            Table htable = phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
+            Table table = phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
             assertFalse(phoenixConn.getTable(new PTableKey(null, baseTableName)).isTransactional());
             assertFalse(viewConn.unwrap(PhoenixConnection.class).getTable(new PTableKey(tenantId, viewOfTable)).isTransactional());
         }
@@ -1196,7 +1195,7 @@ public void testDroppingIndexedColDropsViewIndex() throws Exception {
 
             // scan the physical table and verify there is a single row for the second local index
             Scan scan = new Scan();
-            HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
+            Table table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
             ResultScanner results = table.getScanner(scan);
             Result result = results.next();
             assertNotNull(result);

phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewTTLNotEnabledIT.java (-1)

@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.end2end;
 
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.phoenix.compile.QueryPlan;

phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexIT.java (+1, -2)

@@ -48,7 +48,6 @@
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -914,7 +913,7 @@ public void testUpsertAfterIndexDrop() throws Exception {
             conn.commit();
 
             // the index table is one row
-            HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
+            Table table = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
             ResultScanner resultScanner = table.getScanner(new Scan());
             for (Result result : resultScanner) {
                 System.out.println(result);

phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexWithRegionMovesIT.java (+1, -2)

@@ -22,7 +22,6 @@
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
@@ -1028,7 +1027,7 @@ public void testUpsertAfterIndexDrop() throws Exception {
             conn.commit();
 
             // the index table is one row
-            HTable table = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices()
+            Table table = conn.unwrap(PhoenixConnection.class).getQueryServices()
                     .getTable(fullTableName.getBytes());
             ResultScanner resultScanner = table.getScanner(new Scan());
             for (Result result : resultScanner) {

phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexExtendedIT.java (+2, -2)

@@ -20,11 +20,11 @@
 import org.apache.phoenix.thirdparty.com.google.common.collect.Lists;
 import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
@@ -225,7 +225,7 @@ public static int getRowCountForEmptyColValue(Connection conn, String tableName,
         byte[] emptyCQ = EncodedColumnsUtil.getEmptyKeyValueInfo(table).getFirst();
         ConnectionQueryServices queryServices =
                 conn.unwrap(PhoenixConnection.class).getQueryServices();
-        HTable htable = (HTable) queryServices.getTable(table.getPhysicalName().getBytes());
+        Table htable = queryServices.getTable(table.getPhysicalName().getBytes());
         Scan scan = new Scan();
         scan.addColumn(emptyCF, emptyCQ);
         ResultScanner resultScanner = htable.getScanner(scan);
