Skip to content

Commit cc41ecc

Browse files
committed
Create Converter for Redshift sink
1 parent 3c92fb6 commit cc41ecc

File tree

6 files changed

+124
-7
lines changed

6 files changed

+124
-7
lines changed

flink-connector-aws/flink-connector-redshift/src/main/java/org/apache/flink/connector/redshift/converter/AbstractRedshiftRowConverter.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
* limitations under the License.
1717
*/
1818

19-
package org.apache.flink.connector.redshift.mode.converter;
19+
package org.apache.flink.connector.redshift.converter;
2020

2121
import org.apache.flink.annotation.Internal;
2222
import org.apache.flink.table.data.DecimalData;
@@ -36,7 +36,7 @@
3636
import java.time.LocalTime;
3737
import java.util.UUID;
3838

39-
import static org.apache.flink.connector.redshift.mode.converter.RedshiftConverterUtil.toEpochDayOneTimestamp;
39+
import static org.apache.flink.connector.redshift.converter.RedshiftConverterUtil.toEpochDayOneTimestamp;
4040

4141
/** Base class for all converters that convert between JDBC object and Flink internal object. */
4242
@Internal

flink-connector-aws/flink-connector-redshift/src/main/java/org/apache/flink/connector/redshift/converter/DeserializationConverter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
* limitations under the License.
1717
*/
1818

19-
package org.apache.flink.connector.redshift.mode.converter;
19+
package org.apache.flink.connector.redshift.converter;
2020

2121
import org.apache.flink.annotation.Internal;
2222

flink-connector-aws/flink-connector-redshift/src/main/java/org/apache/flink/connector/redshift/converter/RedshiftConverterUtil.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
* limitations under the License.
1717
*/
1818

19-
package org.apache.flink.connector.redshift.mode.converter;
19+
package org.apache.flink.connector.redshift.converter;
2020

2121
import org.apache.flink.annotation.Internal;
2222
import org.apache.flink.table.data.ArrayData;

flink-connector-aws/flink-connector-redshift/src/main/java/org/apache/flink/connector/redshift/converter/RedshiftRowConverter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
* limitations under the License.
1717
*/
1818

19-
package org.apache.flink.connector.redshift.mode.converter;
19+
package org.apache.flink.connector.redshift.converter;
2020

2121
import org.apache.flink.annotation.Internal;
2222
import org.apache.flink.table.data.RowData;

flink-connector-aws/flink-connector-redshift/src/main/java/org/apache/flink/connector/redshift/converter/SerializationConverter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
* limitations under the License.
1717
*/
1818

19-
package org.apache.flink.connector.redshift.mode.converter;
19+
package org.apache.flink.connector.redshift.converter;
2020

2121
import org.apache.flink.annotation.Internal;
2222
import org.apache.flink.table.data.RowData;
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,119 @@
1-
package org.apache.flink.connector.redshift.mode.copy;public class RedshiftCopyModeRowConverterImpl {
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.flink.connector.redshift.mode.copy;

import org.apache.flink.annotation.Internal;
import org.apache.flink.connector.redshift.converter.AbstractRedshiftRowConverter;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampType;

import java.sql.ResultSet;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;

/**
 * Converter Implementation for Redshift in COPY Mode.
 *
 * <p>{@link #toExternal(RowData)} renders each field of a {@link RowData} to its textual form and
 * returns the row as a {@code String[]} (one element per field), suitable for serialization as a
 * CSV line consumed by the Redshift {@code COPY} command. Reading ({@link #toInternal(ResultSet)})
 * is intentionally unsupported in COPY mode.
 */
@Internal
public class RedshiftCopyModeRowConverterImpl extends AbstractRedshiftRowConverter {

    public static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";

    // DateTimeFormatter is immutable and thread-safe; build it once instead of per timestamp
    // field per row.
    private static final DateTimeFormatter TIMESTAMP_FORMATTER =
            DateTimeFormatter.ofPattern(DATE_TIME_FORMAT);

    /** Logical types of the row's fields, positionally aligned with the incoming {@link RowData}. */
    private final LogicalType[] fieldTypes;

    public RedshiftCopyModeRowConverterImpl(LogicalType[] fieldTypes) {
        this.fieldTypes = fieldTypes;
    }

    /**
     * Converts a Flink internal row into a {@code String[]} of rendered field values.
     *
     * @param rowData the internal row; arity must match {@code fieldTypes}
     * @return a {@code String[]} with one textual value per field
     * @throws UnsupportedOperationException for field types not representable in COPY mode
     *     (binary, time/zone-aware timestamps, and nested/raw types)
     */
    @Override
    public Object toExternal(RowData rowData) {
        final ArrayList<String> csvLine = new ArrayList<>(rowData.getArity());
        for (int index = 0; index < rowData.getArity(); index++) {
            final LogicalType type = fieldTypes[index];
            final StringBuilder sb = new StringBuilder();
            switch (type.getTypeRoot()) {
                case BOOLEAN:
                    sb.append(rowData.getBoolean(index));
                    break;
                case FLOAT:
                    sb.append(rowData.getFloat(index));
                    break;
                case DOUBLE:
                    sb.append(rowData.getDouble(index));
                    break;
                case TINYINT:
                    // Dedicated accessor: getString() on a TINYINT column fails at runtime.
                    sb.append(rowData.getByte(index));
                    break;
                case SMALLINT:
                    // Dedicated accessor: getString() on a SMALLINT column fails at runtime.
                    sb.append(rowData.getShort(index));
                    break;
                case INTERVAL_YEAR_MONTH:
                case INTEGER:
                    sb.append(rowData.getInt(index));
                    break;
                case INTERVAL_DAY_TIME:
                case BIGINT:
                    sb.append(rowData.getLong(index));
                    break;
                case CHAR:
                case VARCHAR:
                    sb.append(rowData.getString(index).toString());
                    break;
                case DATE:
                    // DATE is stored internally as days since epoch.
                    sb.append(
                            LocalDate.ofEpochDay(rowData.getInt(index))
                                    .format(DateTimeFormatter.ISO_DATE));
                    break;
                case TIMESTAMP_WITHOUT_TIME_ZONE:
                    // Only TIMESTAMP_WITHOUT_TIME_ZONE is backed by TimestampType; other
                    // timestamp roots must not reach this cast.
                    final int timestampPrecision = ((TimestampType) type).getPrecision();
                    sb.append(
                            rowData.getTimestamp(index, timestampPrecision)
                                    .toLocalDateTime()
                                    .format(TIMESTAMP_FORMATTER));
                    break;
                case DECIMAL:
                    final int decimalPrecision = ((DecimalType) type).getPrecision();
                    final int decimalScale = ((DecimalType) type).getScale();
                    sb.append(rowData.getDecimal(index, decimalPrecision, decimalScale));
                    break;
                case BINARY:
                case VARBINARY:
                case TIME_WITHOUT_TIME_ZONE:
                case TIMESTAMP_WITH_TIME_ZONE:
                case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                    // Previously these fell through into DATE / TimestampType / DecimalType
                    // branches, producing garbage values or ClassCastExceptions; fail with a
                    // clear message instead.
                case ARRAY:
                case MAP:
                case MULTISET:
                case ROW:
                case RAW:
                default:
                    throw new UnsupportedOperationException("Unsupported type:" + type);
            }
            csvLine.add(sb.toString());
        }
        return csvLine.toArray(new String[0]);
    }

    /**
     * Not supported: COPY mode is write-only.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public RowData toInternal(ResultSet resultSet) {
        throw new UnsupportedOperationException(
                "COPY Mode is supported only for write to Redshift in batch manner.");
    }
}

0 commit comments

Comments
 (0)