diff --git a/core/src/main/java/org/opensearch/sql/calcite/CalcitePlanContext.java b/core/src/main/java/org/opensearch/sql/calcite/CalcitePlanContext.java index 79a37e3bcf..5e6d81a9f3 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/CalcitePlanContext.java +++ b/core/src/main/java/org/opensearch/sql/calcite/CalcitePlanContext.java @@ -20,6 +20,7 @@ import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.calcite.utils.CalciteToolsHelper; import org.opensearch.sql.executor.QueryType; +import org.opensearch.sql.expression.function.FunctionProperties; public class CalcitePlanContext { @@ -27,6 +28,7 @@ public class CalcitePlanContext { public final Connection connection; public final RelBuilder relBuilder; public final ExtendedRexBuilder rexBuilder; + public final FunctionProperties functionProperties; public final QueryType queryType; @Getter @Setter private boolean isResolvingJoinCondition = false; @@ -39,6 +41,7 @@ private CalcitePlanContext(FrameworkConfig config, QueryType queryType) { this.connection = CalciteToolsHelper.connect(config, TYPE_FACTORY); this.relBuilder = CalciteToolsHelper.create(config, TYPE_FACTORY, connection); this.rexBuilder = new ExtendedRexBuilder(relBuilder.getRexBuilder()); + this.functionProperties = new FunctionProperties(QueryType.PPL); } public RexNode resolveJoinCondition( diff --git a/core/src/main/java/org/opensearch/sql/calcite/CalciteRexNodeVisitor.java b/core/src/main/java/org/opensearch/sql/calcite/CalciteRexNodeVisitor.java index 5b76741e3e..dfbe24a264 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/CalciteRexNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/calcite/CalciteRexNodeVisitor.java @@ -7,8 +7,7 @@ import static org.opensearch.sql.ast.expression.SpanUnit.NONE; import static org.opensearch.sql.ast.expression.SpanUnit.UNKNOWN; -import static org.opensearch.sql.calcite.utils.BuiltinFunctionUtils.translateArgument; -import static org.opensearch.sql.calcite.utils.PlanUtils.intervalUnitToSpanUnit; +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.TransferUserDefinedFunction; import java.math.BigDecimal; import java.util.List; @@ -22,6 +21,7 @@ import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.DateString; import org.apache.calcite.util.TimeString; @@ -52,8 +52,11 @@ import org.opensearch.sql.ast.expression.subquery.InSubquery; import org.opensearch.sql.ast.expression.subquery.ScalarSubquery; import org.opensearch.sql.ast.tree.UnresolvedPlan; +import org.opensearch.sql.calcite.type.ExprSqlType; +import org.opensearch.sql.calcite.udf.datetimeUDF.PostprocessDateToStringFunction; import org.opensearch.sql.calcite.utils.BuiltinFunctionUtils; import org.opensearch.sql.calcite.utils.OpenSearchTypeFactory; +import org.opensearch.sql.calcite.utils.PlanUtils; import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.CalciteUnsupportedException; @@ -122,7 +125,7 @@ public RexNode visitLiteral(Literal node, CalcitePlanContext context) { public RexNode visitInterval(Interval node, CalcitePlanContext context) { RexNode value = analyze(node.getValue(), context); SqlIntervalQualifier intervalQualifier = - 
context.rexBuilder.createIntervalUntil(intervalUnitToSpanUnit(node.getUnit())); + context.rexBuilder.createIntervalUntil(PlanUtils.intervalUnitToSpanUnit(node.getUnit())); return context.rexBuilder.makeIntervalLiteral( new BigDecimal(value.toString()), intervalQualifier); } @@ -182,11 +185,40 @@ public RexNode visitIn(In node, CalcitePlanContext context) { @Override public RexNode visitCompare(Compare node, CalcitePlanContext context) { SqlOperator op = BuiltinFunctionUtils.translate(node.getOperator()); - final RexNode left = analyze(node.getLeft(), context); - final RexNode right = analyze(node.getRight(), context); + RexNode leftCandidate = analyze(node.getLeft(), context); + RexNode rightCandidate = analyze(node.getRight(), context); + Boolean whetherCompareByTime = + leftCandidate.getType() instanceof ExprSqlType + || rightCandidate.getType() instanceof ExprSqlType; + + final RexNode left = + transferCompareForDateRelated(leftCandidate, context, whetherCompareByTime); + final RexNode right = + transferCompareForDateRelated(rightCandidate, context, whetherCompareByTime); return context.relBuilder.call(op, left, right); } + private RexNode transferCompareForDateRelated( + RexNode candidate, CalcitePlanContext context, boolean whetherCompareByTime) { + if (whetherCompareByTime) { + SqlOperator postToStringNode = + TransferUserDefinedFunction( + PostprocessDateToStringFunction.class, + "PostprocessDateToString", + ReturnTypes.CHAR_FORCE_NULLABLE); + RexNode transferredStringNode = + context.rexBuilder.makeCall( + postToStringNode, + List.of( + candidate, + context.rexBuilder.makeLiteral( + context.functionProperties.getQueryStartClock().instant().toString()))); + return transferredStringNode; + } else { + return candidate; + } + } + @Override public RexNode visitBetween(Between node, CalcitePlanContext context) { RexNode value = analyze(node.getValue(), context); @@ -322,9 +354,17 @@ public RexNode visitLet(Let node, CalcitePlanContext context) { public RexNode visitFunction(Function node, CalcitePlanContext context) { List arguments = node.getFuncArgs().stream().map(arg -> analyze(arg, context)).collect(Collectors.toList()); - return context.rexBuilder.makeCall( - BuiltinFunctionUtils.translate(node.getFuncName()), - translateArgument(node.getFuncName(), arguments, context)); + SqlOperator operator = BuiltinFunctionUtils.translate(node.getFuncName()); + List translatedArguments = + BuiltinFunctionUtils.translateArgument( + node.getFuncName(), + arguments, + context, + context.functionProperties.getQueryStartClock().instant().toString()); + RelDataType returnType = + BuiltinFunctionUtils.deriveReturnType( + node.getFuncName(), context.rexBuilder, operator, translatedArguments); + return context.rexBuilder.makeCall(returnType, operator, translatedArguments); } @Override diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedAggFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedAggFunction.java index f87a5bac62..ce80a974fa 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedAggFunction.java +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedAggFunction.java @@ -5,6 +5,9 @@ package org.opensearch.sql.calcite.udf; +/** + * TODO. 
support init with constant arguments https://github.com/opensearch-project/sql/issues/3490 + */ public interface UserDefinedAggFunction { /** * @return {@link Accumulator} @@ -32,6 +35,6 @@ interface Accumulator { /** * @return the final aggregation value */ - Object value(); + Object value(Object... args); } } diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedFunction.java index 20908943c4..1e28dc4ce0 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedFunction.java +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/UserDefinedFunction.java @@ -5,6 +5,9 @@ package org.opensearch.sql.calcite.udf; +/** + * TODO. support init with constant arguments https://github.com/opensearch-project/sql/issues/3490 + */ public interface UserDefinedFunction { Object eval(Object... args); } diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ConvertTZFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ConvertTZFunction.java new file mode 100644 index 0000000000..ee7289844c --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ConvertTZFunction.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class ConvertTZFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + + Object argTimestamp = args[0]; + Object fromTz = args[1]; + Object toTz = args[2]; + ExprValue datetimeExpr = + DateTimeFunctions.exprConvertTZ( + new ExprStringValue(argTimestamp.toString()), + new ExprStringValue(fromTz.toString()), + new ExprStringValue(toTz.toString())); + + return datetimeExpr.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateAddSubFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateAddSubFunction.java new file mode 100644 index 0000000000..ee9fa0eefb --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateAddSubFunction.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.*; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.convertToTemporalAmount; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; + +import org.apache.calcite.avatica.util.TimeUnit; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.sql.type.SqlReturnTypeInference; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DateAddSubFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + + TimeUnit unit = (TimeUnit) args[0]; + long interval = ((Number) args[1]).longValue(); + Object argBase = args[2]; + SqlTypeName sqlTypeName = (SqlTypeName) args[3]; + boolean isAdd = (Boolean) args[4]; + SqlTypeName returnSqlType = (SqlTypeName) args[5]; + ExprValue base = transferInputToExprValue(argBase, sqlTypeName); + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue resultDatetime = + DateTimeFunctions.exprDateApplyInterval( + restored, base, convertToTemporalAmount(interval, unit), isAdd); + if (returnSqlType == SqlTypeName.TIMESTAMP) { + return resultDatetime.valueForCalcite(); + } else { + return new ExprDateValue(resultDatetime.dateValue()).valueForCalcite(); + } + } + + public static SqlReturnTypeInference getReturnTypeForAddOrSubDate() { + return opBinding -> { + RelDataType operandType0 = opBinding.getOperandType(6); + SqlTypeName typeName = operandType0.getSqlTypeName(); + if (typeName == SqlTypeName.TIMESTAMP) { + return nullableTimestampUDT; + } else if (typeName == SqlTypeName.DATE) { + return nullableDateUDT; + } + return opBinding.getTypeFactory().createSqlType(typeName); + }; + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateDiffFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateDiffFunction.java new file mode 100644 index 0000000000..34e619b23c --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateDiffFunction.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; +import org.opensearch.sql.expression.function.FunctionProperties; + +/** + * Calculates the difference of date parts of given values. If the first argument is time, today's + * date is used. + * + *
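+ * Illustrative usage (assumed, based on the day-level granularity implied by the LONG return type):
+ * DATEDIFF(TIMESTAMP '2001-02-01 00:00:00', TIMESTAMP '2001-01-01 00:00:00') returns 31.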

(DATE/TIMESTAMP/TIME, DATE/TIMESTAMP/TIME) -> LONG + */ +public class DateDiffFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + SqlTypeName sqlTypeName1 = (SqlTypeName) args[1]; + SqlTypeName sqlTypeName2 = (SqlTypeName) args[3]; + ExprValue diffResult = + DateTimeFunctions.exprDateDiff( + restored, + fromObjectValue(args[0], convertSqlTypeNameToExprType(sqlTypeName1)), + fromObjectValue(args[2], convertSqlTypeNameToExprType(sqlTypeName2))); + return diffResult.longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFormatFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFormatFunction.java new file mode 100644 index 0000000000..22da1de27b --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFormatFunction.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFormatterUtil.getFormattedDate; +import static org.opensearch.sql.expression.datetime.DateTimeFormatterUtil.getFormattedDateOfToday; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DateFormatFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object argDatetime = args[0]; + Object argDatetimeType = args[1]; + Object argFormat = args[2]; + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidateValue = transferInputToExprValue(argDatetime, (SqlTypeName) argDatetimeType); + if (argDatetimeType == SqlTypeName.TIME) { + return getFormattedDateOfToday( + new ExprStringValue(argFormat.toString()), + candidateValue, + restored.getQueryStartClock()) + .stringValue(); + } + return getFormattedDate(candidateValue, new ExprStringValue(argFormat.toString())) + .stringValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFunction.java new file mode 100644 index 0000000000..0a0f9fd0b4 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DateFunction.java @@ -0,0 +1,34 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprDate; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DateFunction implements UserDefinedFunction { + + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return new ExprDateValue(((ExprTimeValue) candidate).dateValue(restored)).valueForCalcite(); + } + return exprDate(candidate).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DatetimeFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DatetimeFunction.java new file mode 100644 index 0000000000..f84847596b --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DatetimeFunction.java @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +/** + * DATETIME(timestamp)/ DATETIME(date, to_timezone) Converts the datetime to a new timezone. If not + * specified, the timestamp is regarded to be in system time zone. + * + *
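+ * Illustrative usage (assumed semantics; converts a timestamp carrying an explicit offset into the
+ * target timezone given by the second argument): DATETIME('2004-02-28 10:00:00+10:00', '+00:00')
+ * returns 2004-02-28 00:00:00.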

+ * (TIMESTAMP, STRING) -> TIMESTAMP
+ * (TIMESTAMP) -> TIMESTAMP + * + *

Converting timestamp with timezone to the second argument timezone. + */ +public class DatetimeFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object argTimestamp = args[0]; + ExprValue argTimestampExpr = new ExprStringValue(argTimestamp.toString()); + ExprValue datetimeExpr; + if (args.length == 1) { + datetimeExpr = DateTimeFunctions.exprDateTimeNoTimezone(argTimestampExpr); + } else { + Object argTimezone = args[1]; + datetimeExpr = + DateTimeFunctions.exprDateTime( + argTimestampExpr, new ExprStringValue(argTimezone.toString())); + } + return datetimeExpr.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayFunction.java new file mode 100644 index 0000000000..f53650580a --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferTimeToTimestamp; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprDayOfMonth; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DayFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprDayOfMonth(transferTimeToTimestamp(candidate, functionProperties)); + } + + public ExprValue extract(ExprValue candidate) { + return exprDayOfMonth(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfWeekFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfWeekFunction.java new file mode 100644 index 0000000000..b1f51ef0f6 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfWeekFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.dayOfWeekToday; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprDayOfWeek; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DayOfWeekFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return dayOfWeekToday(functionProperties.getQueryStartClock()); + } + + public ExprValue extract(ExprValue candidate) { + return exprDayOfWeek(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfYearFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfYearFunction.java new file mode 100644 index 0000000000..373803db24 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/DayOfYearFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.dayOfYearToday; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprDayOfYear; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class DayOfYearFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return dayOfYearToday(functionProperties.getQueryStartClock()); + } + + public ExprValue extract(ExprValue candidate) { + return exprDayOfYear(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ExtractFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ExtractFunction.java new file mode 100644 index 0000000000..b48022b25a --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ExtractFunction.java @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; +import org.opensearch.sql.expression.function.FunctionProperties; + +// TODO: Fix MICROSECOND precision, it is not correct with Calcite timestamp +public class ExtractFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object argPart = args[0]; + Object argTimestamp = args[1]; + SqlTypeName argType = (SqlTypeName) args[2]; + + ExprValue candidate = fromObjectValue(argTimestamp, convertSqlTypeNameToExprType(argType)); + if (argType == SqlTypeName.TIME) { + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + return DateTimeFunctions.exprExtractForTime( + restored, new ExprStringValue(argPart.toString()), candidate) + .longValue(); + } + return DateTimeFunctions.formatExtractFunction( + new ExprStringValue(argPart.toString()), candidate) + .longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromDaysFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromDaysFunction.java new file mode 100644 index 0000000000..dc466c2cb9 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromDaysFunction.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprLongValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class FromDaysFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Number argDays = (Number) args[0]; + ExprValue dateExpr = DateTimeFunctions.exprFromDays(new ExprLongValue(argDays.longValue())); + return dateExpr.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromUnixTimestampFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromUnixTimestampFunction.java new file mode 100644 index 0000000000..657585b188 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/FromUnixTimestampFunction.java @@ -0,0 +1,69 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.nullableTimestampUDT; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprFromUnixTime; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprFromUnixTimeFormat; + +import java.util.List; +import org.apache.calcite.rel.type.RelDataType; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.sql.type.SqlReturnTypeInference; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprDoubleValue; +import org.opensearch.sql.data.model.ExprStringValue; + +/** + * DOUBLE -> DATETIME DOUBLE, STRING -> STRING Mimic implementation from + * DATETIMEFUNCTIONS::from_unixtime + */ +public class FromUnixTimestampFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + if (args.length == 1) { + // Double input + Object value = args[0]; + if (!(value instanceof Number)) { + throw new IllegalArgumentException( + "If only 1 argument for from_unixtimestamp function, then it should be number."); + + } else { + double input = ((Number) value).doubleValue(); + return exprFromUnixTime(new ExprDoubleValue(input)).valueForCalcite(); + } + } else if (args.length == 2) { + Object value = args[0]; + Object target = args[1]; + return exprFromUnixTimeFormat( + new ExprDoubleValue((Number) value), new ExprStringValue((String) target)) + .valueForCalcite(); + } else { + throw new IllegalArgumentException("Too many arguments for from_unixtimestamp function"); + } + } + + public static SqlReturnTypeInference interReturnTypes() { + return opBinding -> { + RelDataTypeFactory typeFactory = opBinding.getTypeFactory(); + + List argTypes = opBinding.collectOperandTypes(); + + if (argTypes.isEmpty()) { + throw new IllegalArgumentException("Function requires at least one argument."); + } + if (argTypes.size() == 1) { + return nullableTimestampUDT; + } + return typeFactory.createSqlType(SqlTypeName.CHAR); + }; + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/GetFormatFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/GetFormatFunction.java new file mode 100644 index 0000000000..5e05be2d2c --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/GetFormatFunction.java @@ -0,0 +1,28 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class GetFormatFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object argType = args[0]; + Object argStandard = args[1]; + + ExprValue fmt = + DateTimeFunctions.exprGetFormat( + new ExprStringValue(argType.toString()), new ExprStringValue(argStandard.toString())); + return fmt.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/HourFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/HourFunction.java new file mode 100644 index 0000000000..20a84343c6 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/HourFunction.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprHour; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class HourFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprHour(candidate); + } + + public ExprValue extract(ExprValue candidate) { + return exprHour(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/LastDayFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/LastDayFunction.java new file mode 100644 index 0000000000..77af51ea25 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/LastDayFunction.java @@ -0,0 +1,32 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprLastDay; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprLastDayToday; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class LastDayFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return exprLastDayToday(restored.getQueryStartClock()).valueForCalcite(); + } + return exprLastDay(candidate).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeDateFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeDateFunction.java new file mode 100644 index 0000000000..5d98b05d45 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeDateFunction.java @@ -0,0 +1,43 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.expression.function.FunctionDSL.nullMissingHandling; + +import com.google.common.collect.ImmutableList; +import java.util.Arrays; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprDoubleValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +/** + * Returns a date, given year and day-of-year values. dayofyear must be greater than 0 or the result + * is NULL. The result is also NULL if either argument is NULL. Arguments are rounded to an integer. + * + *
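+ * Illustrative usage (assuming the rounding and day-of-year overflow rules listed below):
+ * MAKEDATE(2020, 32) returns 2020-02-01, and MAKEDATE(2019, 400) rolls over into early 2020.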

Limitations: - Zero year interpreted as 2000; - Negative year is not accepted; - day-of-year + * should be greater than zero; - day-of-year could be greater than 365/366, calculation switches to + * the next year(s) + */ +public class MakeDateFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + UserDefinedFunctionUtils.validateArgumentCount("MAKE_DATE", 2, args.length, false); + UserDefinedFunctionUtils.validateArgumentTypes( + Arrays.asList(args), + ImmutableList.of(Number.class, Number.class), + ImmutableList.of(true, true)); + + ExprDoubleValue v1 = new ExprDoubleValue((Number) args[0]); + ExprDoubleValue v2 = new ExprDoubleValue((Number) args[1]); + ExprValue date = nullMissingHandling(DateTimeFunctions::exprMakeDate).apply(v1, v2); + return date.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeTimeFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeTimeFunction.java new file mode 100644 index 0000000000..df0a9d637e --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MakeTimeFunction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprDoubleValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class MakeTimeFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + ExprValue timeExpr = + DateTimeFunctions.exprMakeTime( + new ExprDoubleValue(((Number) args[0]).doubleValue()), + new ExprDoubleValue(((Number) args[1]).doubleValue()), + new ExprDoubleValue(((Number) args[2]).doubleValue())); + return timeExpr.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MicrosecondFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MicrosecondFunction.java new file mode 100644 index 0000000000..2b244a914c --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MicrosecondFunction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class MicrosecondFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + ExprValue candidate = + fromObjectValue(args[0], convertSqlTypeNameToExprType((SqlTypeName) args[1])); + return DateTimeFunctions.exprMicrosecond(candidate).integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteFunction.java new file mode 100644 index 0000000000..43e981055a --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteFunction.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprMinute; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class MinuteFunction implements UserDefinedFunction { + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprMinute(candidate); + } + + public ExprValue extract(ExprValue candidate) { + return exprMinute(candidate); + } + + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteOfDayFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteOfDayFunction.java new file mode 100644 index 0000000000..c9c3e20fc1 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MinuteOfDayFunction.java @@ -0,0 +1,28 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +/** minute(time) returns the amount of minutes in the day, in the range of 0 to 1439. */ +public class MinuteOfDayFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + ExprValue candidate = + fromObjectValue(args[0], convertSqlTypeNameToExprType((SqlTypeName) args[1])); + return DateTimeFunctions.exprMinuteOfDay(candidate).integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MonthFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MonthFunction.java new file mode 100644 index 0000000000..85a91e5ae2 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/MonthFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprMonth; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.monthOfYearToday; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class MonthFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return monthOfYearToday(functionProperties.getQueryStartClock()); + } + + public ExprValue extract(ExprValue candidate) { + return exprMonth(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodAddFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodAddFunction.java new file mode 100644 index 0000000000..7036f725e6 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodAddFunction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class PeriodAddFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + + ExprValue periodAddExpr = + DateTimeFunctions.exprPeriodAdd( + new ExprIntegerValue((Number) args[0]), new ExprIntegerValue((Number) args[1])); + + return periodAddExpr.integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodDiffFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodDiffFunction.java new file mode 100644 index 0000000000..c2aa0dd15f --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodDiffFunction.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import com.google.common.collect.ImmutableList; +import java.util.Arrays; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +public class PeriodDiffFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + UserDefinedFunctionUtils.validateArgumentCount("PERIOD_DIFF", 2, args.length, false); + + UserDefinedFunctionUtils.validateArgumentTypes( + Arrays.asList(args), ImmutableList.of(Number.class, Number.class)); + + ExprValue periodDiffExpr = + DateTimeFunctions.exprPeriodDiff( + new ExprIntegerValue((Number) args[0]), new ExprIntegerValue((Number) args[1])); + + return periodDiffExpr.integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodNameFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodNameFunction.java new file mode 100644 index 0000000000..0eb792d28b --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PeriodNameFunction.java @@ -0,0 +1,56 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.format.TextStyle; +import java.util.Locale; +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.calcite.utils.datetime.DateTimeParser; +import org.opensearch.sql.calcite.utils.datetime.InstantUtils; + +/** + * We cannot use dayname/monthname in calcite because they're different with our current performance + * e.g. August -> Aug, Wednesday -> Wed + */ +public class PeriodNameFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object candiate = args[0]; + Object type = args[1]; + SqlTypeName argumentType = (SqlTypeName) args[2]; + LocalDate localDate; + if (candiate instanceof String) { + localDate = DateTimeParser.parse(candiate.toString()).toLocalDate(); + } else if (argumentType == SqlTypeName.DATE) { + localDate = + LocalDate.ofInstant(InstantUtils.fromInternalDate((int) candiate), ZoneOffset.UTC); + } else if (argumentType == SqlTypeName.TIMESTAMP) { + localDate = + LocalDateTime.ofInstant(InstantUtils.fromEpochMills((long) candiate), ZoneOffset.UTC) + .toLocalDate(); + } else { + throw new IllegalArgumentException("something wrong"); + } + String nameType = (String) type; + // TODO: Double-check whether it is ok to always return US week & month names + if (Objects.equals(nameType, "MONTHNAME")) { + return localDate.getMonth().getDisplayName(TextStyle.FULL, Locale.getDefault()); + } else if (Objects.equals(nameType, "DAYNAME")) { + return localDate.getDayOfWeek().getDisplayName(TextStyle.FULL, Locale.getDefault()); + } else { + throw new IllegalArgumentException("something wrong"); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessDateToStringFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessDateToStringFunction.java new file mode 100644 index 0000000000..25bde13f3e --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessDateToStringFunction.java @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.formatTimestampWithoutUnnecessaryNanos; +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.InstantUtils.parseStringToTimestamp; + +import java.time.LocalDateTime; +import java.util.Objects; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class PostprocessDateToStringFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + Object candidate = args[0]; + if (Objects.isNull(candidate)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + LocalDateTime localDateTime = parseStringToTimestamp((String) candidate, restored); + String formatted = formatTimestampWithoutUnnecessaryNanos(localDateTime); + return formatted; + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessForUDTFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessForUDTFunction.java new file mode 100644 index 0000000000..9300811b92 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/PostprocessForUDTFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.datetime.InstantUtils; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; + +public class PostprocessForUDTFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + Object candidate = args[0]; + if (Objects.isNull(candidate)) { + return null; + } + SqlTypeName sqlTypeName = (SqlTypeName) args[1]; + Instant instant = InstantUtils.convertToInstant(candidate, sqlTypeName); + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + switch (sqlTypeName) { + case DATE: + return new ExprDateValue(localDateTime.toLocalDate()).valueForCalcite(); + case TIME: + return new ExprTimeValue(localDateTime.toLocalTime()).valueForCalcite(); + case TIMESTAMP: + return new ExprTimestampValue(localDateTime).valueForCalcite(); + default: + throw new IllegalArgumentException("Unsupported datetime type: " + sqlTypeName); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/QuarterFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/QuarterFunction.java new file mode 100644 index 0000000000..8a41c78d61 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/QuarterFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferTimeToTimestamp; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprQuarter; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class QuarterFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprQuarter(transferTimeToTimestamp(candidate, functionProperties)); + } + + public ExprValue extract(ExprValue candidate) { + return exprQuarter(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondFunction.java new file mode 100644 index 0000000000..d976c5103a --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondFunction.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprSecond; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class SecondFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprSecond(candidate); + } + + public ExprValue extract(ExprValue candidate) { + return exprSecond(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondToTimeFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondToTimeFunction.java new file mode 100644 index 0000000000..87f96a7d4a --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SecondToTimeFunction.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprSecToTime; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprSecToTimeWithNanos; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.model.ExprValueUtils; + +public class SecondToTimeFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Number candidate = (Number) args[0]; + ExprValue returnTimeValue; + ExprValue transferredValue; + if (candidate instanceof Long) { + transferredValue = ExprValueUtils.longValue((Long) candidate); + returnTimeValue = exprSecToTime(transferredValue); + } else if (candidate instanceof Integer) { + transferredValue = ExprValueUtils.integerValue((Integer) candidate); + returnTimeValue = exprSecToTime(transferredValue); + } else if (candidate instanceof Double) { + transferredValue = ExprValueUtils.doubleValue((Double) candidate); + returnTimeValue = exprSecToTimeWithNanos(transferredValue); + } else { + transferredValue = ExprValueUtils.floatValue((Float) candidate); + returnTimeValue = exprSecToTimeWithNanos(transferredValue); + } + return new ExprTimeValue(returnTimeValue.timeValue()).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/StrToDateFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/StrToDateFunction.java new file mode 100644 index 0000000000..2de628a377 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/StrToDateFunction.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; +import 
org.opensearch.sql.expression.function.FunctionProperties; + +/** + * str_to_date(string, string) is used to extract a TIMESTAMP from the first argument string using + * the formats specified in the second argument string. The input argument must have enough + * information to be parsed as a DATE, TIMESTAMP, or TIME. Acceptable string format specifiers are + * the same as those used in the DATE_FORMAT function. It returns NULL when a statement cannot be + * parsed due to an invalid pair of arguments, and when 0 is provided for any DATE field. Otherwise, + * it will return a TIMESTAMP with the parsed values (as well as default values for any field that + * was not parsed). + */ +public class StrToDateFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue formatedDateExpr = + DateTimeFunctions.exprStrToDate( + restored, + new ExprStringValue(args[0].toString()), + new ExprStringValue(args[1].toString())); + + if (formatedDateExpr.isNull()) { + return null; + } + return formatedDateExpr.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SysdateFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SysdateFunction.java new file mode 100644 index 0000000000..d373f138ed --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/SysdateFunction.java @@ -0,0 +1,30 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.formatNow; + +import java.time.Clock; +import java.time.LocalDateTime; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprTimestampValue; + +public class SysdateFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + LocalDateTime localDateTime; + if (args.length == 0) { + localDateTime = formatNow(Clock.systemDefaultZone(), 0); + } else { + localDateTime = formatNow(Clock.systemDefaultZone(), (int) args[0]); + } + return new ExprTimestampValue(localDateTime).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeAddSubFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeAddSubFunction.java new file mode 100644 index 0000000000..8809c14da7 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeAddSubFunction.java @@ -0,0 +1,47 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprAddTime; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprSubTime; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class TimeAddSubFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + Object argBase = args[0]; + SqlTypeName baseType = (SqlTypeName) args[1]; + Object argInterval = args[2]; + SqlTypeName argIntervalType = (SqlTypeName) args[3]; + boolean isAdd = (boolean) args[4]; + ExprValue baseValue = transferInputToExprValue(args[0], baseType); + ExprValue intervalValue = transferInputToExprValue(argInterval, argIntervalType); + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue result; + if (isAdd) { + result = exprAddTime(restored, baseValue, intervalValue); + } else { + result = exprSubTime(restored, baseValue, intervalValue); + } + + if (baseType == SqlTypeName.TIME) { + return new ExprTimeValue(result.timeValue()).valueForCalcite(); + } else { + return result.valueForCalcite(); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeDiffFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeDiffFunction.java new file mode 100644 index 0000000000..99540cbc06 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeDiffFunction.java @@ -0,0 +1,31 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimeDiff; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValue; + +public class TimeDiffFunction implements 
UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + SqlTypeName startType = (SqlTypeName) args[2]; + SqlTypeName endType = (SqlTypeName) args[3]; + ExprValue diffValue = + exprTimeDiff( + transferInputToExprValue(args[0], startType), + transferInputToExprValue(args[1], endType)); + return new ExprTimeValue(diffValue.timeValue()).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFormatFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFormatFunction.java new file mode 100644 index 0000000000..5d5a857c9e --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFormatFunction.java @@ -0,0 +1,28 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFormatterUtil.getFormattedTime; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; + +public class TimeFormatFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + SqlTypeName sqlTypeName = (SqlTypeName) args[2]; + String format = (String) args[1]; + return getFormattedTime( + transferInputToExprValue(args[0], sqlTypeName), new ExprStringValue(format)) + .stringValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFunction.java new file mode 100644 index 0000000000..dceb87cdc4 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeFunction.java @@ -0,0 +1,26 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTime; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; + +public class TimeFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + + Object argTime = args[0]; + SqlTypeName argType = (SqlTypeName) args[1]; + return exprTime(transferInputToExprValue(argTime, argType)).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeToSecondFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeToSecondFunction.java new file mode 100644 index 0000000000..89b7ec12a6 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimeToSecondFunction.java @@ -0,0 +1,26 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimeToSec; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; + +public class TimeToSecondFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + SqlTypeName timeType = (SqlTypeName) args[1]; + return exprTimeToSec(fromObjectValue(args[0], convertSqlTypeNameToExprType(timeType))) + .longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampAddFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampAddFunction.java new file mode 100644 index 0000000000..39712309e7 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampAddFunction.java @@ -0,0 +1,43 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimestampAdd; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimestampAddForTimeType; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprLongValue; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class TimestampAddFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + String addUnit = (String) args[0]; + int amount = (int) args[1]; + SqlTypeName sqlTypeName = (SqlTypeName) args[3]; + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue timestampBase = transferInputToExprValue(args[2], sqlTypeName); + if (sqlTypeName == SqlTypeName.TIME) { + return exprTimestampAddForTimeType( + restored.getQueryStartClock(), + new ExprStringValue(addUnit), + new ExprLongValue(amount), + timestampBase) + .valueForCalcite(); + } + ExprValue returnValue = + exprTimestampAdd(new ExprStringValue(addUnit), new ExprLongValue(amount), timestampBase); + return returnValue.valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampDiffFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampDiffFunction.java new file mode 100644 index 0000000000..955957bb38 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampDiffFunction.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimestampDiff; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprTimestampDiffForTimeType; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprStringValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class TimestampDiffFunction implements UserDefinedFunction { + @Override + public Object eval(Object...
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + String addUnit = (String) args[0]; + SqlTypeName sqlTypeName1 = (SqlTypeName) args[2]; + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + + SqlTypeName sqlTypeName2 = (SqlTypeName) args[4]; + if (sqlTypeName1 == SqlTypeName.TIME || sqlTypeName2 == SqlTypeName.TIME) { + return exprTimestampDiffForTimeType( + restored, + new ExprStringValue(addUnit), + transferInputToExprValue(args[1], SqlTypeName.TIME), + transferInputToExprValue(args[3], SqlTypeName.TIME)) + .longValue(); + } + ExprValue timestamp1 = transferInputToExprValue(args[1], sqlTypeName1); + ExprValue timestamp2 = transferInputToExprValue(args[3], sqlTypeName2); + ExprValue diffResult = exprTimestampDiff(new ExprStringValue(addUnit), timestamp1, timestamp2); + return diffResult.longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampFunction.java new file mode 100644 index 0000000000..b72a48efdd --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/TimestampFunction.java @@ -0,0 +1,45 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprTimestampValue; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprAddTime; + +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +/** + * We need to write our own implementation since we actually implement timestamp addition here: + * (STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP, and (STRING/DATE/TIME/DATETIME/TIMESTAMP, + * STRING/DATE/TIME/DATETIME/TIMESTAMP) -> TIMESTAMP + */ +public class TimestampFunction implements UserDefinedFunction { + @Override + public Object eval(Object...
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + if (Objects.isNull(args[0])) { + return null; + } + if (args.length == 3) { + SqlTypeName sqlTypeName = (SqlTypeName) args[1]; + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + return transferInputToExprTimestampValue(args[0], sqlTypeName, restored).valueForCalcite(); + } else { + SqlTypeName sqlTypeName = (SqlTypeName) args[2]; + ExprValue dateTimeBase = transferInputToExprValue(args[0], sqlTypeName); + ExprValue addTime = transferInputToExprValue(args[1], (SqlTypeName) args[3]); + return exprAddTime(FunctionProperties.None, dateTimeBase, addTime).valueForCalcite(); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToDaysFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToDaysFunction.java new file mode 100644 index 0000000000..12c47072bf --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToDaysFunction.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprToDays; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; + +public class ToDaysFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + SqlTypeName sqlTypeName = (SqlTypeName) args[1]; + return exprToDays(transferInputToExprValue(args[0], sqlTypeName)).longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToSecondsFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToSecondsFunction.java new file mode 100644 index 0000000000..8af485ad09 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/ToSecondsFunction.java @@ -0,0 +1,36 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprTimestampValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprToSeconds; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprToSecondsForIntType; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprLongValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class ToSecondsFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + SqlTypeName sqlTypeName = (SqlTypeName) args[1]; + switch (sqlTypeName) { + case DATE, TIME, TIMESTAMP, CHAR, VARCHAR: // need to transfer to timestamp firstly + ExprValue dateTimeValue = transferInputToExprTimestampValue(args[0], sqlTypeName, restored); + return exprToSeconds(dateTimeValue).longValue(); + default: + return exprToSecondsForIntType(new ExprLongValue((Number) args[0])).longValue(); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UnixTimeStampFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UnixTimeStampFunction.java new file mode 100644 index 0000000000..9d50a3ee96 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UnixTimeStampFunction.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.unixTimeStamp; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.unixTimeStampOf; + +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class UnixTimeStampFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + if (args.length == 1) { + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + return unixTimeStamp(restored.getQueryStartClock()).longValue(); + } + Object input = args[0]; + if (Objects.isNull(input)) { + return null; + } + ExprValue candidate = + fromObjectValue(args[0], convertSqlTypeNameToExprType((SqlTypeName) args[1])); + return (double) unixTimeStampOf(candidate).longValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcDateFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcDateFunction.java new file mode 100644 index 0000000000..d8f3efc95d --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcDateFunction.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprUtcDate; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class UtcDateFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[0]); + return exprUtcDate(restored).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeFunction.java new file mode 100644 index 0000000000..2753afc32e --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeFunction.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprUtcTime; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class UtcTimeFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + return exprUtcTime(restored).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeStampFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeStampFunction.java new file mode 100644 index 0000000000..ea0bf8a3be --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/UtcTimeStampFunction.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprUtcTimeStamp; + +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class UtcTimeStampFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[0]); + return exprUtcTimeStamp(restored).valueForCalcite(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekDayFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekDayFunction.java new file mode 100644 index 0000000000..cf4811ae45 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekDayFunction.java @@ -0,0 +1,32 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprWeekday; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.formatNow; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class WeekDayFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + SqlTypeName sqlTypeName = (SqlTypeName) args[1]; + if (sqlTypeName == SqlTypeName.TIME) { + return formatNow(restored.getQueryStartClock()).getDayOfWeek().getValue() - 1; + } else { + return exprWeekday(transferInputToExprValue(args[0], sqlTypeName)).integerValue(); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekFunction.java new file mode 100644 index 0000000000..f829f9a077 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/WeekFunction.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.datetime.DateTimeFunctions; + +/** WEEK & WEEK_OF_YEAR */ +public class WeekFunction implements UserDefinedFunction { + @Override + public Object eval(Object... 
args) { + + if (Objects.isNull(args[0])) { + return null; + } + + ExprValue candidate = + fromObjectValue(args[0], convertSqlTypeNameToExprType((SqlTypeName) args[2])); + ExprValue woyExpr = + DateTimeFunctions.exprWeek(candidate, new ExprIntegerValue((Number) args[1])); + return woyExpr.integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearFunction.java new file mode 100644 index 0000000000..78458fbc5d --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearFunction.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferTimeToTimestamp; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprYear; + +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class YearFunction implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + ExprValue candidate = transferInputToExprValue(args[0], (SqlTypeName) args[1]); + if ((SqlTypeName) args[1] == SqlTypeName.TIME) { + return extractForTime(candidate, restored).valueForCalcite(); + } + return extract(candidate).valueForCalcite(); + } + + public ExprValue extractForTime(ExprValue candidate, FunctionProperties functionProperties) { + return exprYear(transferTimeToTimestamp(candidate, functionProperties)); + } + + public ExprValue extract(ExprValue candidate) { + return exprYear(candidate); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearWeekFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearWeekFunction.java new file mode 100644 index 0000000000..58099d6c40 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/datetimeUDF/YearWeekFunction.java @@ -0,0 +1,50 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.datetimeUDF; + +import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.restoreFunctionProperties; +import static org.opensearch.sql.calcite.utils.datetime.DateTimeApplyUtils.transferInputToExprValue; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprYearweek; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.yearweekToday; + +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class YearWeekFunction 
implements UserDefinedFunction { + @Override + public Object eval(Object... args) { + + if (UserDefinedFunctionUtils.containsNull(args)) { + return null; + } + int mode; + if (Objects.isNull(args[0])) { + return null; + } + SqlTypeName sqlTypeName; + ExprValue exprValue; + if (args.length == 3) { + sqlTypeName = (SqlTypeName) args[1]; + mode = 0; + } else { + sqlTypeName = (SqlTypeName) args[2]; + mode = (int) args[1]; + } + FunctionProperties restored = restoreFunctionProperties(args[args.length - 1]); + if (sqlTypeName == SqlTypeName.TIME) { + return yearweekToday(new ExprIntegerValue(mode), restored.getQueryStartClock()) + .integerValue(); + } + exprValue = transferInputToExprValue(args[0], sqlTypeName); + ExprValue yearWeekValue = exprYearweek(exprValue, new ExprIntegerValue(mode)); + return yearWeekValue.integerValue(); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/PercentileApproxFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/PercentileApproxFunction.java new file mode 100644 index 0000000000..1c69c198a8 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/PercentileApproxFunction.java @@ -0,0 +1,95 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.udf.udaf; + +import com.tdunning.math.stats.AVLTreeDigest; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.calcite.udf.UserDefinedAggFunction; + +/** We implement this ourselves since it is an approximate (t-digest based) algorithm */ +public class PercentileApproxFunction + implements UserDefinedAggFunction { + SqlTypeName returnType; + private double compression; + double percentile; + + @Override + public PencentileApproAccumulator init() { + returnType = SqlTypeName.DOUBLE; + compression = 100.0; + percentile = 1.0; + return new PencentileApproAccumulator(); + } + + // Add values to the accumulator + @Override + public PencentileApproAccumulator add(PencentileApproAccumulator acc, Object... values) { + Object targetValue = values[0]; + if (Objects.isNull(targetValue)) { + return acc; + } + percentile = ((Number) values[1]).intValue() / 100.0; + returnType = (SqlTypeName) values[values.length - 1]; + if (values.length > 3) { // a compression argument is present + compression = ((Number) values[values.length - 2]).doubleValue(); + } + + acc.evaluate(((Number) targetValue).doubleValue()); + return acc; + } + + // Calculate the percentile + @Override + public Object result(PencentileApproAccumulator acc) { + if (acc.size() == 0) { + return null; + } + double retValue = (double) acc.value(compression, percentile); + switch (returnType) { + case INTEGER: + int intRet = (int) retValue; + return intRet; + case BIGINT: + long longRet = (long) retValue; + return longRet; + case FLOAT: + float floatRet = (float) retValue; + return floatRet; + default: + return retValue; + } + } + + public static class PencentileApproAccumulator implements Accumulator { + private List candidate; + + public int size() { + return candidate.size(); + } + + public PencentileApproAccumulator() { + candidate = new ArrayList<>(); + } + + public void evaluate(double value) { + candidate.add(value); + } + + @Override + public Object value(Object...
argList) { + double percent = (double) argList[1]; + double compression = (double) argList[0]; + AVLTreeDigest tree = new AVLTreeDigest(compression); + for (Number num : candidate) { + tree.add(num.doubleValue()); + } + return tree.quantile(percent); + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/TakeAggFunction.java b/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/TakeAggFunction.java index c4829df64c..8a43a84702 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/TakeAggFunction.java +++ b/core/src/main/java/org/opensearch/sql/calcite/udf/udaf/TakeAggFunction.java @@ -44,7 +44,7 @@ public TakeAccumulator() { } @Override - public Object value() { + public Object value(Object... argList) { return hits; } diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/AggregateUtils.java b/core/src/main/java/org/opensearch/sql/calcite/utils/AggregateUtils.java index b264f0cf27..ce8f1cc761 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/utils/AggregateUtils.java +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/AggregateUtils.java @@ -12,15 +12,18 @@ import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.TransferUserDefinedAggFunction; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.List; import org.apache.calcite.rel.RelCollations; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlAggFunction; +import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.tools.RelBuilder; import org.opensearch.sql.ast.expression.AggregateFunction; import org.opensearch.sql.calcite.CalcitePlanContext; +import org.opensearch.sql.calcite.udf.udaf.PercentileApproxFunction; import org.opensearch.sql.calcite.udf.udaf.TakeAggFunction; import org.opensearch.sql.expression.function.BuiltinFunctionName; @@ -70,10 +73,15 @@ static RelBuilder.AggCall translate( argList, context.relBuilder); case PERCENTILE_APPROX: - throw new UnsupportedOperationException("PERCENTILE_APPROX is not supported in PPL"); - // case APPROX_COUNT_DISTINCT: - // return - // context.relBuilder.aggregateCall(SqlStdOperatorTable.APPROX_COUNT_DISTINCT, field); + List newArgList = new ArrayList<>(argList); + newArgList.add(context.rexBuilder.makeFlag(field.getType().getSqlTypeName())); + return TransferUserDefinedAggFunction( + PercentileApproxFunction.class, + "percentile_approx", + ReturnTypes.ARG0_FORCE_NULLABLE, + List.of(field), + newArgList, + context.relBuilder); } throw new IllegalStateException("Not Supported value: " + agg.getFuncName()); } diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/BuiltinFunctionUtils.java b/core/src/main/java/org/opensearch/sql/calcite/utils/BuiltinFunctionUtils.java index a014e9516d..fdd7738ca7 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/utils/BuiltinFunctionUtils.java +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/BuiltinFunctionUtils.java @@ -6,26 +6,88 @@ package org.opensearch.sql.calcite.utils; import static java.lang.Math.E; +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.*; import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.getLegacyTypeName; import static org.opensearch.sql.calcite.utils.UserDefinedFunctionUtils.*; +import com.google.common.collect.ImmutableList; import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; import 
java.util.Locale; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.calcite.avatica.util.TimeUnit; +import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; +import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlLibraryOperators; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlTrimFunction; +import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.calcite.sql.type.SqlTypeTransforms; import org.opensearch.sql.calcite.CalcitePlanContext; +import org.opensearch.sql.calcite.ExtendedRexBuilder; import org.opensearch.sql.calcite.udf.SpanFunction; import org.opensearch.sql.calcite.udf.conditionUDF.IfFunction; import org.opensearch.sql.calcite.udf.conditionUDF.IfNullFunction; import org.opensearch.sql.calcite.udf.conditionUDF.NullIfFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.ConvertTZFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DateAddSubFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DateDiffFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DateFormatFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DateFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DatetimeFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DayFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DayOfWeekFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.DayOfYearFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.ExtractFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.FromDaysFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.FromUnixTimestampFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.GetFormatFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.HourFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.LastDayFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MakeDateFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MakeTimeFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MicrosecondFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MinuteFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MinuteOfDayFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.MonthFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.PeriodAddFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.PeriodDiffFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.PeriodNameFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.PostprocessForUDTFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.QuarterFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.SecondFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.SecondToTimeFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.StrToDateFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.SysdateFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimeAddSubFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimeDiffFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimeFormatFunction; +import 
org.opensearch.sql.calcite.udf.datetimeUDF.TimeFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimeToSecondFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimestampAddFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimestampDiffFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.TimestampFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.ToDaysFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.ToSecondsFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.UnixTimeStampFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.UtcDateFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.UtcTimeFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.UtcTimeStampFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.WeekDayFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.WeekFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.YearFunction; +import org.opensearch.sql.calcite.udf.datetimeUDF.YearWeekFunction; import org.opensearch.sql.calcite.udf.mathUDF.CRC32Function; import org.opensearch.sql.calcite.udf.mathUDF.ConvFunction; import org.opensearch.sql.calcite.udf.mathUDF.EulerFunction; @@ -35,10 +97,18 @@ import org.opensearch.sql.calcite.udf.textUDF.LocateFunction; import org.opensearch.sql.calcite.udf.textUDF.ReplaceFunction; +/** + * TODO: Refactor this code so that all related parts live together and the call is returned + * directly. TODO: Remove all makeFlag usages and use literals directly. + */ public interface BuiltinFunctionUtils { + Set TIME_EXCLUSIVE_OPS = + Set.of("SECOND", "SECOND_OF_MINUTE", "MINUTE", "MINUTE_OF_HOUR", "HOUR", "HOUR_OF_DAY"); + static SqlOperator translate(String op) { - switch (op.toUpperCase(Locale.ROOT)) { + String capitalOP = op.toUpperCase(Locale.ROOT); + switch (capitalOP) { case "AND": return SqlStdOperatorTable.AND; case "OR": @@ -102,7 +172,7 @@ static SqlOperator translate(String op) { return TransferUserDefinedFunction( LocateFunction.class, "LOCATE", - getNullableReturnTypeInferenceForFixedType(SqlTypeName.INTEGER)); + ReturnTypes.INTEGER.andThen(SqlTypeTransforms.FORCE_NULLABLE)); case "UPPER": return SqlStdOperatorTable.UPPER; // Built-in Math Functions @@ -147,9 +217,7 @@ static SqlOperator translate(String op) { // MOD(3.1, 2.1) = 1.1, // whereas SqlStdOperatorTable.MOD supports only integer / long parameters.
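+ // For reference, a UDF registered through TransferUserDefinedFunction is invoked via its
+ // positional eval(Object...) contract. A minimal sketch, assuming ModFunction keeps the
+ // two-argument form shown in the comment above:
+ //   UserDefinedFunction mod = new ModFunction();
+ //   Object remainder = mod.eval(3.1, 2.1); // evaluates to roughly 1.1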
return TransferUserDefinedFunction( - ModFunction.class, - "MOD", - getLeastRestrictiveReturnTypeAmongArgsAt(List.of(0, 1), true)); + ModFunction.class, "MOD", getLeastRestrictiveReturnTypeAmongArgsAt(List.of(0, 1))); case "PI": return SqlStdOperatorTable.PI; case "POW", "POWER": @@ -172,29 +240,91 @@ static SqlOperator translate(String op) { case "CBRT": return SqlStdOperatorTable.CBRT; // Built-in Date Functions - case "CURRENT_TIMESTAMP": - return SqlStdOperatorTable.CURRENT_TIMESTAMP; - case "CURRENT_DATE": - return SqlStdOperatorTable.CURRENT_DATE; + case "CURRENT_TIMESTAMP", "NOW", "LOCALTIMESTAMP", "LOCALTIME": + return TransferUserDefinedFunction( + PostprocessForUDTFunction.class, "POSTPROCESS", timestampInference); + case "CURTIME", "CURRENT_TIME": + return TransferUserDefinedFunction( + PostprocessForUDTFunction.class, "POSTPROCESS", timeInference); + case "CURRENT_DATE", "CURDATE": + return TransferUserDefinedFunction( + PostprocessForUDTFunction.class, "POSTPROCESS", dateInference); case "DATE": - return SqlLibraryOperators.DATE; - case "ADDDATE": - return SqlLibraryOperators.DATE_ADD_SPARK; + return TransferUserDefinedFunction(DateFunction.class, "DATE", dateInference); case "DATE_ADD": - return SqlLibraryOperators.DATEADD; + return TransferUserDefinedFunction( + DateAddSubFunction.class, "DATE_ADD", timestampInference); + case "ADDDATE": + return TransferUserDefinedFunction( + DateAddSubFunction.class, "ADDDATE", DateAddSubFunction.getReturnTypeForAddOrSubDate()); + case "SUBDATE": + return TransferUserDefinedFunction( + DateAddSubFunction.class, "SUBDATE", DateAddSubFunction.getReturnTypeForAddOrSubDate()); + case "DATE_SUB": + return TransferUserDefinedFunction( + DateAddSubFunction.class, "DATE_SUB", timestampInference); + case "ADDTIME", "SUBTIME": + return TransferUserDefinedFunction( + TimeAddSubFunction.class, + capitalOP, + UserDefinedFunctionUtils.getReturnTypeForTimeAddSub()); + case "DAY_OF_WEEK", "DAYOFWEEK": + return TransferUserDefinedFunction( + DayOfWeekFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "DAY_OF_YEAR", "DAYOFYEAR": + return TransferUserDefinedFunction( + DayOfYearFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "EXTRACT": + // Reuse OpenSearch PPL's implementation + return TransferUserDefinedFunction(ExtractFunction.class, "EXTRACT", ReturnTypes.BIGINT); + case "CONVERT_TZ": + return TransferUserDefinedFunction( + ConvertTZFunction.class, "CONVERT_TZ", timestampInference); + case "DATETIME": + return TransferUserDefinedFunction(DatetimeFunction.class, "DATETIME", timestampInference); + + case "FROM_DAYS": + return TransferUserDefinedFunction(FromDaysFunction.class, "FROM_DAYS", dateInference); + case "DATE_FORMAT": + return TransferUserDefinedFunction( + DateFormatFunction.class, "DATE_FORMAT", ReturnTypes.VARCHAR); + case "GET_FORMAT": + return TransferUserDefinedFunction( + GetFormatFunction.class, "GET_FORMAT", ReturnTypes.VARCHAR); + case "MAKETIME": + return TransferUserDefinedFunction(MakeTimeFunction.class, "MAKETIME", timeInference); + case "MAKEDATE": + return TransferUserDefinedFunction(MakeDateFunction.class, "MAKEDATE", dateInference); + case "MINUTE_OF_DAY": + return TransferUserDefinedFunction( + MinuteOfDayFunction.class, "MINUTE_OF_DAY", ReturnTypes.INTEGER); + case "PERIOD_ADD": + return TransferUserDefinedFunction( + PeriodAddFunction.class, "PERIOD_ADD", ReturnTypes.INTEGER); + case "PERIOD_DIFF": + return TransferUserDefinedFunction( + PeriodDiffFunction.class, "PERIOD_DIFF", ReturnTypes.INTEGER); + case 
"STR_TO_DATE": + return TransferUserDefinedFunction( + StrToDateFunction.class, "STR_TO_DATE", timestampInference); + case "WEEK", "WEEK_OF_YEAR": + // WEEK in PPL support an additional mode argument, therefore we need to use a custom + // implementation. + return TransferUserDefinedFunction(WeekFunction.class, "WEEK", ReturnTypes.INTEGER); // UDF Functions case "SPAN": return TransferUserDefinedFunction( SpanFunction.class, "SPAN", ReturnTypes.ARG0_FORCE_NULLABLE); // Built-in condition functions case "IF": - return TransferUserDefinedFunction(IfFunction.class, "if", getReturnTypeInference(1)); + return TransferUserDefinedFunction( + IfFunction.class, "if", ReturnTypes.ARG1.andThen(SqlTypeTransforms.FORCE_NULLABLE)); case "IFNULL": return TransferUserDefinedFunction( - IfNullFunction.class, "ifnull", getReturnTypeInference(1)); + IfNullFunction.class, "ifnull", ReturnTypes.ARG0_FORCE_NULLABLE); case "NULLIF": return TransferUserDefinedFunction( - NullIfFunction.class, "ifnull", getReturnTypeInference(0)); + NullIfFunction.class, "nullif", ReturnTypes.ARG0_FORCE_NULLABLE); case "IS NOT NULL": return SqlStdOperatorTable.IS_NOT_NULL; case "IS NULL": @@ -203,6 +333,82 @@ static SqlOperator translate(String op) { // TODO optimize this function to ImplementableFunction return TransferUserDefinedFunction( TypeOfFunction.class, "typeof", ReturnTypes.VARCHAR_2000_NULLABLE); + case "DAYNAME": + return TransferUserDefinedFunction(PeriodNameFunction.class, "DAYNAME", ReturnTypes.CHAR); + case "MONTHNAME": + return TransferUserDefinedFunction(PeriodNameFunction.class, "MONTHNAME", ReturnTypes.CHAR); + case "LAST_DAY": + return TransferUserDefinedFunction(LastDayFunction.class, "LAST_DAY", dateInference); + case "UNIX_TIMESTAMP": + return TransferUserDefinedFunction( + UnixTimeStampFunction.class, "unix_timestamp", ReturnTypes.DOUBLE); + case "SYSDATE": + return TransferUserDefinedFunction(SysdateFunction.class, "SYSDATE", timestampInference); + case "TIME": + return TransferUserDefinedFunction(TimeFunction.class, "TIME", timeInference); + case "TIMEDIFF": + return TransferUserDefinedFunction(TimeDiffFunction.class, "TIMEDIFF", timeInference); + case "TIME_TO_SEC": + return TransferUserDefinedFunction( + TimeToSecondFunction.class, "TIME_TO_SEC", ReturnTypes.BIGINT); + case "TIME_FORMAT": + return TransferUserDefinedFunction( + TimeFormatFunction.class, "TIME_FORMAT", ReturnTypes.CHAR); + case "TIMESTAMP": + // return SqlLibraryOperators.TIMESTAMP; + return TransferUserDefinedFunction( + TimestampFunction.class, "timestamp", timestampInference); + case "TIMESTAMPADD": + // return SqlLibraryOperators.TIMESTAMP; + return TransferUserDefinedFunction( + TimestampAddFunction.class, "TIMESTAMPADD", timestampInference); + case "TIMESTAMPDIFF": + return TransferUserDefinedFunction( + TimestampDiffFunction.class, "TIMESTAMPDIFF", ReturnTypes.BIGINT); + case "DATEDIFF": + return TransferUserDefinedFunction(DateDiffFunction.class, "DATEDIFF", ReturnTypes.BIGINT); + case "TO_SECONDS": + return TransferUserDefinedFunction( + ToSecondsFunction.class, "TO_SECONDS", ReturnTypes.BIGINT); + case "TO_DAYS": + return TransferUserDefinedFunction(ToDaysFunction.class, "TO_DAYS", ReturnTypes.BIGINT); + case "SEC_TO_TIME": + return TransferUserDefinedFunction( + SecondToTimeFunction.class, "SEC_TO_TIME", timeInference); + case "YEAR": + return TransferUserDefinedFunction(YearFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "QUARTER": + return TransferUserDefinedFunction( + QuarterFunction.class, capitalOP, 
INTEGER_FORCE_NULLABLE); + case "MINUTE", "MINUTE_OF_HOUR": + return TransferUserDefinedFunction(MinuteFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "HOUR", "HOUR_OF_DAY": + return TransferUserDefinedFunction(HourFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "MONTH", "MONTH_OF_YEAR": + return TransferUserDefinedFunction(MonthFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "DAY_OF_MONTH", "DAYOFMONTH", "DAY": + return TransferUserDefinedFunction(DayFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "SECOND", "SECOND_OF_MINUTE": + return TransferUserDefinedFunction(SecondFunction.class, capitalOP, INTEGER_FORCE_NULLABLE); + case "MICROSECOND": + return TransferUserDefinedFunction( + MicrosecondFunction.class, "MICROSECOND", ReturnTypes.INTEGER); + case "YEARWEEK": + return TransferUserDefinedFunction(YearWeekFunction.class, "YEARWEEK", ReturnTypes.INTEGER); + case "FROM_UNIXTIME": + return TransferUserDefinedFunction( + FromUnixTimestampFunction.class, + "FROM_UNIXTIME", + FromUnixTimestampFunction.interReturnTypes()); + case "WEEKDAY": + return TransferUserDefinedFunction(WeekDayFunction.class, "WEEKDAY", ReturnTypes.INTEGER); + case "UTC_TIMESTAMP": + return TransferUserDefinedFunction( + UtcTimeStampFunction.class, "utc_timestamp", timestampInference); + case "UTC_TIME": + return TransferUserDefinedFunction(UtcTimeFunction.class, "utc_time", timeInference); + case "UTC_DATE": + return TransferUserDefinedFunction(UtcDateFunction.class, "utc_date", dateInference); // TODO Add more, ref RexImpTable default: throw new IllegalArgumentException("Unsupported operator: " + op); @@ -223,8 +429,9 @@ static SqlOperator translate(String op) { * expectations. */ static List translateArgument( - String op, List argList, CalcitePlanContext context) { - switch (op.toUpperCase(Locale.ROOT)) { + String op, List argList, CalcitePlanContext context, String currentTimestampStr) { + String capitalOP = op.toUpperCase(Locale.ROOT); + switch (capitalOP) { case "TRIM": List trimArgs = new ArrayList<>( @@ -274,6 +481,190 @@ static List translateArgument( throw new IllegalArgumentException("Log cannot accept argument list: " + argList); } return LogArgs; + case "DATE": + List dateArgs = + List.of( + argList.get(0), + context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.get(0))), + context.rexBuilder.makeLiteral(currentTimestampStr)); + return dateArgs; + case "HOUR", + "HOUR_OF_DAY", + "MINUTE", + "MINUTE_OF_HOUR", + "QUARTER", + "YEAR", + "LAST_DAY", + "DAY_OF_WEEK", + "DAYOFWEEK", + "DAY_OF_YEAR", + "DAYOFYEAR", + "MONTH", + "MONTH_OF_YEAR", + "DAY", + "DAY_OF_MONTH", + "DAYOFMONTH", + "SECOND", + "SECOND_OF_MINUTE": + return List.of( + argList.get(0), + context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.get(0))), + context.rexBuilder.makeLiteral(currentTimestampStr)); + case "CURRENT_TIMESTAMP", "NOW", "LOCALTIMESTAMP", "LOCALTIME": + RexNode currentTimestampCall = + context.rexBuilder.makeCall(SqlStdOperatorTable.CURRENT_TIMESTAMP, List.of()); + return List.of(currentTimestampCall, context.rexBuilder.makeFlag(SqlTypeName.TIMESTAMP)); + case "CURTIME", "CURRENT_TIME": + RexNode currentTimeCall = + context.rexBuilder.makeCall(SqlStdOperatorTable.CURRENT_TIME, List.of()); + return List.of(currentTimeCall, context.rexBuilder.makeFlag(SqlTypeName.TIME)); + case "CURRENT_DATE", "CURDATE": + RexNode currentDateCall = + context.rexBuilder.makeCall(SqlStdOperatorTable.CURRENT_DATE, List.of()); + return List.of(currentDateCall, 
context.rexBuilder.makeFlag(SqlTypeName.DATE)); + case "TIMESTAMP", + "TIMEDIFF", + "TIME_TO_SEC", + "TIME_FORMAT", + "TO_SECONDS", + "TO_DAYS", + "CONVERT_TZ": + List timestampArgs = new ArrayList<>(argList); + timestampArgs.addAll( + argList.stream() + .map(p -> context.rexBuilder.makeFlag(transferDateRelatedTimeName(p))) + .collect(Collectors.toList())); + timestampArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return timestampArgs; + case "YEARWEEK", "WEEKDAY": + List weekdayArgs = new ArrayList<>(argList); + weekdayArgs.addAll( + argList.stream() + .map(p -> context.rexBuilder.makeFlag(transferDateRelatedTimeName(p))) + .collect(Collectors.toList())); + weekdayArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return weekdayArgs; + case "TIMESTAMPADD": + List timestampAddArgs = new ArrayList<>(argList); + timestampAddArgs.add( + context.rexBuilder.makeFlag(argList.get(2).getType().getSqlTypeName())); + timestampAddArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return timestampAddArgs; + case "TIMESTAMPDIFF": + List timestampDiffArgs = new ArrayList<>(); + timestampDiffArgs.add(argList.getFirst()); + timestampDiffArgs.addAll(buildArgsWithTypes(context.rexBuilder, argList, 1, 2)); + timestampDiffArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return timestampDiffArgs; + case "DATEDIFF": + // datediff differs with timestamp diff in that it + List dateDiffArgs = buildArgsWithTypes(context.rexBuilder, argList, 0, 1); + dateDiffArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return dateDiffArgs; + case "DAYNAME", "MONTHNAME": + List periodNameArgs = new ArrayList<>(); + periodNameArgs.add(argList.getFirst()); + periodNameArgs.add(context.rexBuilder.makeLiteral(capitalOP)); + periodNameArgs.add( + context.rexBuilder.makeFlag(argList.getFirst().getType().getSqlTypeName())); + return periodNameArgs; + case "DATE_SUB": + List dateSubArgs = transformDateManipulationArgs(argList, context.rexBuilder); + // A flag that represents isAdd + dateSubArgs.add(context.rexBuilder.makeLiteral(false)); + dateSubArgs.add(context.rexBuilder.makeFlag(SqlTypeName.TIMESTAMP)); + dateSubArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return dateSubArgs; + case "DATE_ADD": + List dateAddArgs = transformDateManipulationArgs(argList, context.rexBuilder); + dateAddArgs.add(context.rexBuilder.makeLiteral(true)); + dateAddArgs.add(context.rexBuilder.makeFlag(SqlTypeName.TIMESTAMP)); + dateAddArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return dateAddArgs; + case "ADDTIME": + SqlTypeName arg0Type = transferDateRelatedTimeName(argList.getFirst()); + SqlTypeName arg1Type = transferDateRelatedTimeName(argList.get(1)); + RexNode type0 = context.rexBuilder.makeFlag(arg0Type); + RexNode type1 = context.rexBuilder.makeFlag(arg1Type); + RexNode isAdd = context.rexBuilder.makeLiteral(true); + + return List.of( + argList.getFirst(), + type0, + argList.get(1), + type1, + isAdd, + context.rexBuilder.makeLiteral(currentTimestampStr)); + case "ADDDATE": + return transformAddOrSubDateArgs(argList, context.rexBuilder, true, currentTimestampStr); + case "SUBDATE": + return transformAddOrSubDateArgs(argList, context.rexBuilder, false, currentTimestampStr); + case "SUBTIME": + List subTimeArgs = transformTimeManipulationArgs(argList, context.rexBuilder); + subTimeArgs.add(context.rexBuilder.makeLiteral(false)); + subTimeArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return subTimeArgs; + case "TIME": + 
return ImmutableList.of( + argList.getFirst(), + context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.getFirst()))); + case "DATE_FORMAT", "FORMAT_TIMESTAMP": + RexNode dateExpr = argList.get(0); + RexNode dateFormatPatternExpr = argList.get(1); + RexNode datetimeType; + datetimeType = context.rexBuilder.makeFlag(transferDateRelatedTimeName(dateExpr)); + return ImmutableList.of( + dateExpr, + datetimeType, + dateFormatPatternExpr, + context.rexBuilder.makeLiteral(currentTimestampStr)); + case "UNIX_TIMESTAMP": + List unixArgs = new ArrayList<>(argList); + unixArgs.add(context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.getFirst()))); + unixArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return unixArgs; + case "WEEK", "WEEK_OF_YEAR": + RexNode woyMode; + if (argList.size() >= 2) { + woyMode = argList.get(1); + } else { + woyMode = + context.rexBuilder.makeLiteral( + 0, context.rexBuilder.getTypeFactory().createSqlType(SqlTypeName.INTEGER)); + } + return List.of( + argList.getFirst(), + woyMode, + context.rexBuilder.makeFlag(argList.getFirst().getType().getSqlTypeName())); + case "STR_TO_DATE": + List strToDateArgs = new ArrayList<>(argList); + strToDateArgs.add(context.rexBuilder.makeLiteral(currentTimestampStr)); + return strToDateArgs; + case "MINUTE_OF_DAY", "MICROSECOND": + // Convert STRING/TIME/TIMESTAMP to TIMESTAMP + return ImmutableList.of( + argList.getFirst(), + context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.getFirst()))); + case "EXTRACT": + return ImmutableList.of( + argList.getFirst(), + argList.get(1), + context.rexBuilder.makeFlag(transferDateRelatedTimeName(argList.get(1))), + context.rexBuilder.makeLiteral(currentTimestampStr)); + case "DATETIME": + // Convert timestamp to a string to reuse OS PPL V2's implementation + RexNode argTimestamp = argList.getFirst(); + if (argTimestamp.getType().getSqlTypeName().equals(SqlTypeName.TIMESTAMP)) { + argTimestamp = + makeConversionCall( + "DATE_FORMAT", + ImmutableList.of(argTimestamp, context.rexBuilder.makeLiteral("%Y-%m-%d %T")), + context, + currentTimestampStr); + } + return Stream.concat(Stream.of(argTimestamp), argList.stream().skip(1)).toList(); + case "UTC_TIMESTAMP", "UTC_TIME", "UTC_DATE": + return List.of(context.rexBuilder.makeLiteral(currentTimestampStr)); case "TYPEOF": return List.of( context.rexBuilder.makeLiteral( @@ -282,4 +673,137 @@ static List translateArgument( return argList; } } + + private static RexNode makeConversionCall( + String funcName, + List arguments, + CalcitePlanContext context, + String currentTimestampStr) { + SqlOperator operator = translate(funcName); + List translatedArguments = + translateArgument(funcName, arguments, context, currentTimestampStr); + RelDataType returnType = + deriveReturnType(funcName, context.rexBuilder, operator, translatedArguments); + return context.rexBuilder.makeCall(returnType, operator, translatedArguments); + } + + static RelDataType deriveReturnType( + String funcName, RexBuilder rexBuilder, SqlOperator operator, List exprs) { + RelDataType returnType = + switch (funcName.toUpperCase(Locale.ROOT)) { + // This effectively invalidates the operand type check, which leads to unnecessary + // incompatible parameter type errors + case "DATEDIFF" -> rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BIGINT); + case "TIMESTAMPDIFF" -> rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BIGINT); + case "YEAR", + "MINUTE", + "HOUR", + "HOUR_OF_DAY", + "MONTH", + "MONTH_OF_YEAR", + "DAY_OF_YEAR", + "DAYOFYEAR", 
+ "DAY_OF_MONTH", + "DAYOFMONTH", + "DAY_OF_WEEK", + "DAYOFWEEK", + "DAY", + "MINUTE_OF_HOUR", + "QUARTER", + "SECOND", + "SECOND_OF_MINUTE" -> rexBuilder.getTypeFactory().createSqlType(SqlTypeName.INTEGER); + default -> rexBuilder.deriveReturnType(operator, exprs); + }; + // Make all return types nullable + return rexBuilder.getTypeFactory().createTypeWithNullability(returnType, true); + } + + private static List transformDateManipulationArgs( + List argList, ExtendedRexBuilder rexBuilder) { + List dateAddArgs = new ArrayList<>(); + RexNode baseTimestampExpr = argList.get(0); + RexNode intervalExpr = argList.get(1); + // 1. Add time unit + RexLiteral timeFrameName = + rexBuilder.makeFlag( + Objects.requireNonNull(intervalExpr.getType().getIntervalQualifier()).getUnit()); + dateAddArgs.add(timeFrameName); + // 2. Add interval + RexLiteral intervalArg = + rexBuilder.makeBigintLiteral(((RexLiteral) intervalExpr).getValueAs(BigDecimal.class)); + dateAddArgs.add(intervalArg); + // 3. Add timestamp + dateAddArgs.add(baseTimestampExpr); + // 4. Add original sql type + dateAddArgs.add(rexBuilder.makeFlag(transferDateRelatedTimeName(baseTimestampExpr))); + return dateAddArgs; + } + + private static List transformAddOrSubDateArgs( + List argList, + ExtendedRexBuilder rexBuilder, + Boolean isAdd, + String currentTimestampStr) { + List addOrSubDateArgs = new ArrayList<>(); + addOrSubDateArgs.add(argList.getFirst()); + SqlTypeName addType = argList.get(1).getType().getSqlTypeName(); + if (addType == SqlTypeName.BIGINT + || addType == SqlTypeName.DECIMAL + || addType == SqlTypeName.INTEGER) { + Number value = ((RexLiteral) argList.get(1)).getValueAs(Number.class); + addOrSubDateArgs.add( + rexBuilder.makeIntervalLiteral( + new BigDecimal(value.toString()), + new SqlIntervalQualifier(TimeUnit.DAY, null, SqlParserPos.ZERO))); + } else { + addOrSubDateArgs.add(argList.get(1)); + } + List addOrSubDateRealInput = + transformDateManipulationArgs(addOrSubDateArgs, rexBuilder); + addOrSubDateRealInput.add(rexBuilder.makeLiteral(isAdd)); + SqlTypeName firstType = transferDateRelatedTimeName(argList.getFirst()); + if (firstType == SqlTypeName.DATE + && (addType == SqlTypeName.BIGINT + || addType == SqlTypeName.DECIMAL + || addType == SqlTypeName.INTEGER)) { + addOrSubDateRealInput.add(rexBuilder.makeFlag(SqlTypeName.DATE)); + addOrSubDateRealInput.add( + rexBuilder.makeLiteral(0, rexBuilder.getTypeFactory().createSqlType(SqlTypeName.DATE))); + } else { + addOrSubDateRealInput.add(rexBuilder.makeFlag(SqlTypeName.TIMESTAMP)); + addOrSubDateRealInput.add( + rexBuilder.makeLiteral( + 0L, rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP))); + } + addOrSubDateRealInput.add(rexBuilder.makeLiteral(currentTimestampStr)); + return addOrSubDateRealInput; + } + + private static List transformTimeManipulationArgs( + List argList, ExtendedRexBuilder rexBuilder) { + SqlTypeName arg0Type = transferDateRelatedTimeName(argList.getFirst()); + SqlTypeName arg1Type = transferDateRelatedTimeName(argList.get(1)); + RexNode type0 = rexBuilder.makeFlag(arg0Type); + RexNode type1 = rexBuilder.makeFlag(arg1Type); + return new ArrayList<>(List.of(argList.getFirst(), type0, argList.get(1), type1)); + } + + /** + * Builds a list of RexNodes where each selected argument is followed by a RexNode representing + * its SQL type. + * + * @param rexBuilder the RexBuilder instance used to create type flags + * @param args the original list of arguments + * @return A new list of RexNodes: [arg, typeFlag, arg, typeFlag, ...] 
+ */ + private static List buildArgsWithTypes( + RexBuilder rexBuilder, List args, int... indexes) { + List result = new ArrayList<>(); + for (int index : indexes) { + RexNode arg = args.get(index); + result.add(arg); + result.add(rexBuilder.makeFlag(transferDateRelatedTimeName(arg))); + } + return result; + } } diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/UserDefinedFunctionUtils.java b/core/src/main/java/org/opensearch/sql/calcite/utils/UserDefinedFunctionUtils.java index 519edd36eb..3758d1c73f 100644 --- a/core/src/main/java/org/opensearch/sql/calcite/utils/UserDefinedFunctionUtils.java +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/UserDefinedFunctionUtils.java @@ -6,10 +6,22 @@ package org.opensearch.sql.calcite.utils; import static org.apache.calcite.sql.type.SqlTypeUtil.createArrayType; +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.*; +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.ExprUDT.*; +import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Locale; +import java.util.Objects; import org.apache.calcite.linq4j.tree.Types; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; @@ -22,16 +34,36 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.InferTypes; +import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlReturnTypeInference; import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.calcite.sql.type.SqlTypeTransforms; import org.apache.calcite.sql.validate.SqlUserDefinedAggFunction; import org.apache.calcite.sql.validate.SqlUserDefinedFunction; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.util.Optionality; +import org.opensearch.sql.calcite.type.ExprSqlType; import org.opensearch.sql.calcite.udf.UserDefinedAggFunction; import org.opensearch.sql.calcite.udf.UserDefinedFunction; +import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.executor.QueryType; +import org.opensearch.sql.expression.function.FunctionProperties; public class UserDefinedFunctionUtils { + public static SqlReturnTypeInference INTEGER_FORCE_NULLABLE = + ReturnTypes.INTEGER.andThen(SqlTypeTransforms.FORCE_NULLABLE); + public static RelDataType nullableTimeUDT = TYPE_FACTORY.createUDT(EXPR_TIME, true); + public static RelDataType nullableDateUDT = TYPE_FACTORY.createUDT(EXPR_DATE, true); + public static RelDataType nullableTimestampUDT = + TYPE_FACTORY.createUDT(ExprUDT.EXPR_TIMESTAMP, true); + + public static SqlReturnTypeInference timestampInference = + ReturnTypes.explicit(nullableTimestampUDT); + + public static SqlReturnTypeInference timeInference = ReturnTypes.explicit(nullableTimeUDT); + + public static SqlReturnTypeInference dateInference = ReturnTypes.explicit(nullableDateUDT); + public static RelBuilder.AggCall TransferUserDefinedAggFunction( Class UDAF, String functionName, @@ -72,21 +104,6 @@ public static SqlOperator TransferUserDefinedFunction( udfFunction); } - public static SqlReturnTypeInference 
getReturnTypeInferenceForArray() { - return opBinding -> { - RelDataTypeFactory typeFactory = opBinding.getTypeFactory(); - - // Get argument types - List argTypes = opBinding.collectOperandTypes(); - - if (argTypes.isEmpty()) { - throw new IllegalArgumentException("Function requires at least one argument."); - } - RelDataType firstArgType = argTypes.getFirst(); - return createArrayType(typeFactory, firstArgType, true); - }; - } - /** * Infer return argument type as the widest return type among arguments as specified positions. * E.g. (Integer, Long) -> Long; (Double, Float, SHORT) -> Double @@ -96,7 +113,7 @@ public static SqlReturnTypeInference getReturnTypeInferenceForArray() { * @return The type inference */ public static SqlReturnTypeInference getLeastRestrictiveReturnTypeAmongArgsAt( - List positions, boolean nullable) { + List positions) { return opBinding -> { RelDataTypeFactory typeFactory = opBinding.getTypeFactory(); List types = new ArrayList<>(); @@ -114,18 +131,11 @@ public static SqlReturnTypeInference getLeastRestrictiveReturnTypeAmongArgsAt( "Cannot determine a common type for the given positions."); } - return typeFactory.createTypeWithNullability(widerType, nullable); + return widerType; }; } - /** - * For some udf/udaf, when giving a list of arguments, we need to infer the return type from the - * arguments. - * - * @param targetPosition - * @return a inference function - */ - public static SqlReturnTypeInference getReturnTypeInference(int targetPosition) { + static SqlReturnTypeInference getReturnTypeInferenceForArray() { return opBinding -> { RelDataTypeFactory typeFactory = opBinding.getTypeFactory(); @@ -135,30 +145,175 @@ public static SqlReturnTypeInference getReturnTypeInference(int targetPosition) if (argTypes.isEmpty()) { throw new IllegalArgumentException("Function requires at least one argument."); } - RelDataType firstArgType = argTypes.get(targetPosition); - return typeFactory.createTypeWithNullability( - typeFactory.createSqlType(firstArgType.getSqlTypeName()), true); + RelDataType firstArgType = argTypes.getFirst(); + return createArrayType(typeFactory, firstArgType, true); }; } /** - * For some udf/udaf, We need to create nullable types arguments. - * - * @param typeName - * @return a inference function + * ADDTIME and SUBTIME has special return type maps: (DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> + * TIMESTAMP (TIME, DATE/TIMESTAMP/TIME) -> TIME Therefore, we create a special return type + * inference for them. 
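A standalone java.time sketch of why a TIME first operand keeps producing a time-of-day while a DATE/TIMESTAMP first operand produces a full timestamp:

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.LocalTime;

class TimeAddSubSketch {
  public static void main(String[] args) {
    Duration delta = Duration.ofHours(3);
    // TIME operand: the result is still a time-of-day and wraps past midnight
    System.out.println(LocalTime.of(23, 30).plus(delta)); // 02:30
    // DATE/TIMESTAMP operand: the result carries the date, i.e. a timestamp
    System.out.println(LocalDateTime.of(2020, 8, 26, 23, 30).plus(delta)); // 2020-08-27T02:30
  }
}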
*/ - public static SqlReturnTypeInference getNullableReturnTypeInferenceForFixedType( - SqlTypeName typeName) { + static SqlReturnTypeInference getReturnTypeForTimeAddSub() { return opBinding -> { - RelDataTypeFactory typeFactory = opBinding.getTypeFactory(); + RelDataType operandType0 = opBinding.getOperandType(0); + if (operandType0 instanceof ExprSqlType) { + ExprUDT exprUDT = ((ExprSqlType) operandType0).getUdt(); + if (exprUDT == EXPR_DATE || exprUDT == EXPR_TIMESTAMP) { + return nullableTimestampUDT; + } else if (exprUDT == EXPR_TIME) { + return nullableTimeUDT; + } else { + throw new IllegalArgumentException("Unsupported UDT type"); + } + } + SqlTypeName typeName = operandType0.getSqlTypeName(); + return switch (typeName) { + case DATE, TIMESTAMP -> + // Return TIMESTAMP + nullableTimestampUDT; + case TIME -> + // Return TIME + nullableTimeUDT; + default -> throw new IllegalArgumentException("Unsupported type: " + typeName); + }; + }; + } - // Get argument types - List argTypes = opBinding.collectOperandTypes(); + static List transferStringExprToDateValue(String timeExpr) { + try { + if (timeExpr.contains(":")) { + // A timestamp + LocalDateTime localDateTime = + LocalDateTime.parse(timeExpr, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); + return List.of( + localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth()); + } else { + LocalDate localDate = + LocalDate.parse(timeExpr, DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); + return List.of(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth()); + } + } catch (DateTimeParseException e) { + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", timeExpr)); + } + } - if (argTypes.isEmpty()) { - throw new IllegalArgumentException("Function requires at least one argument."); + /** + * Check whether a function gets enough arguments. + * + * @param funcName the name of the function + * @param expectedArguments the number of expected arguments + * @param actualArguments the number of actual arguments + * @param exactMatch whether the number of actual arguments should precisely match the number of + * expected arguments. If false, it suffices as long as the number of actual number of + * arguments is not smaller that the number of expected arguments. + * @throws IllegalArgumentException if the argument length does not match the expected one + */ + public static void validateArgumentCount( + String funcName, int expectedArguments, int actualArguments, boolean exactMatch) { + if (exactMatch) { + if (actualArguments != expectedArguments) { + throw new IllegalArgumentException( + String.format( + "Mismatch arguments: function %s expects %d arguments, but got %d", + funcName, expectedArguments, actualArguments)); } - return typeFactory.createTypeWithNullability(typeFactory.createSqlType(typeName), true); - }; + } else { + if (actualArguments < expectedArguments) { + throw new IllegalArgumentException( + String.format( + "Mismatch arguments: function %s expects at least %d arguments, but got %d", + funcName, expectedArguments, actualArguments)); + } + } + } + + /** + * Validates that the given list of objects matches the given list of types. + * + *

This function first checks if the sizes of the two lists match. If not, it throws an {@code + * IllegalArgumentException}. Then, it iterates through the lists and checks if each object is an + * instance of the corresponding type. If any object is not of the expected type, it throws an + * {@code IllegalArgumentException} with a descriptive message. + * + * @param objects the list of objects to validate + * @param types the list of expected types + * @throws IllegalArgumentException if the sizes of the lists do not match or if any object is not + * an instance of the corresponding type + */ + public static void validateArgumentTypes(List objects, List> types) { + validateArgumentTypes(objects, types, Collections.nCopies(types.size(), false)); + } + + public static void validateArgumentTypes( + List objects, List> types, boolean nullable) { + validateArgumentTypes(objects, types, Collections.nCopies(types.size(), nullable)); + } + + public static void validateArgumentTypes( + List objects, List> types, List nullables) { + if (objects.size() < types.size()) { + throw new IllegalArgumentException( + String.format( + "Mismatch in the number of objects and types. Got %d objects and %d types", + objects.size(), types.size())); + } + for (int i = 0; i < types.size(); i++) { + if (objects.get(i) == null && nullables.get(i)) { + continue; + } + if (!types.get(i).isInstance(objects.get(i))) { + throw new IllegalArgumentException( + String.format( + "Object at index %d is not of type %s (Got %s)", + i, + types.get(i).getName(), + objects.get(i) == null ? "null" : objects.get(i).getClass().getName())); + } + } + } + + /** Check whether the given array contains null values. */ + public static boolean containsNull(Object[] objects) { + return Arrays.stream(objects).anyMatch(Objects::isNull); + } + + public static String formatTimestampWithoutUnnecessaryNanos(LocalDateTime localDateTime) { + String base = localDateTime.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + int nano = localDateTime.getNano(); + if (nano == 0) return base; + + String nanoStr = String.format(Locale.ENGLISH, "%09d", nano); + nanoStr = nanoStr.replaceFirst("0+$", ""); + if (!nanoStr.isEmpty()) { + return base + "." 
+ nanoStr; + } + return base; + } + + public static SqlTypeName transferDateRelatedTimeName(RexNode candidate) { + RelDataType type = candidate.getType(); + if (type instanceof ExprSqlType) { + ExprUDT exprUDT = ((ExprSqlType) type).getUdt(); + if (exprUDT == EXPR_TIME) { + return SqlTypeName.TIME; + } else if (exprUDT == EXPR_TIMESTAMP) { + return SqlTypeName.TIMESTAMP; + } else if (exprUDT == EXPR_DATE) { + return SqlTypeName.DATE; + } + } + return type.getSqlTypeName(); + } + + // TODO: pass the function properties directly to the UDF instead of string + public static FunctionProperties restoreFunctionProperties(Object timestampStr) { + String expression = (String) timestampStr; + Instant parsed = Instant.parse(expression); + FunctionProperties functionProperties = + new FunctionProperties(parsed, ZoneId.systemDefault(), QueryType.PPL); + return functionProperties; } } diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeApplyUtils.java b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeApplyUtils.java new file mode 100644 index 0000000000..23e9e40f0c --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeApplyUtils.java @@ -0,0 +1,85 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.utils.datetime; + +import static org.opensearch.sql.calcite.utils.OpenSearchTypeFactory.convertSqlTypeNameToExprType; +import static org.opensearch.sql.data.model.ExprValueUtils.fromObjectValue; + +import java.time.Duration; +import java.time.Instant; +import java.time.Period; +import java.time.temporal.TemporalAmount; +import org.apache.calcite.avatica.util.TimeUnit; +import org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.type.ExprCoreType; +import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.expression.function.FunctionProperties; + +public interface DateTimeApplyUtils { + static Instant applyInterval(Instant base, Duration interval, boolean isAdd) { + return isAdd ? base.plus(interval) : base.minus(interval); + } + + public static ExprValue transferInputToExprValue(Object candidate, SqlTypeName sqlTypeName) { + return fromObjectValue(candidate, convertSqlTypeNameToExprType(sqlTypeName)); + } + + public static ExprValue transferInputToExprTimestampValue( + Object candidate, SqlTypeName sqlTypeName, FunctionProperties properties) { + switch (sqlTypeName) { + case TIME: + ExprTimeValue timeValue = + (ExprTimeValue) fromObjectValue(candidate, convertSqlTypeNameToExprType(sqlTypeName)); + return new ExprTimestampValue(timeValue.timestampValue(properties)); + default: + try { + return new ExprTimestampValue( + fromObjectValue(candidate, convertSqlTypeNameToExprType(sqlTypeName)) + .timestampValue()); + } catch (SemanticCheckException e) { + ExprTimeValue hardTransferredTimeValue = + (ExprTimeValue) fromObjectValue(candidate, ExprCoreType.TIME); + return new ExprTimestampValue(hardTransferredTimeValue.timestampValue(properties)); + } + } + } + + /** + * Create a temporal amount of the given number of units. For duration below a day, it returns + * duration; for duration including and above a day, it returns period for natural days, months, + * quarters, and years, which may be of unfixed lengths. 
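A standalone java.time sketch of why convertToTemporalAmount distinguishes calendar-based Period from fixed-length Duration:

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.Period;

class TemporalAmountSketch {
  public static void main(String[] args) {
    LocalDateTime base = LocalDateTime.of(2020, 1, 31, 12, 0);
    // Calendar-aware: one month after Jan 31 in a leap year is Feb 29
    System.out.println(base.plus(Period.ofMonths(1))); // 2020-02-29T12:00
    // Fixed-length: 30 * 24h lands on a different calendar day
    System.out.println(base.plus(Duration.ofDays(30))); // 2020-03-01T12:00
  }
}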
+ * + * @param number The count of unit + * @param unit The unit of the temporal amount + * @return A temporal amount value, can be either a Period or a Duration + */ + static TemporalAmount convertToTemporalAmount(long number, TimeUnit unit) { + return switch (unit) { + case YEAR -> Period.ofYears((int) number); + case QUARTER -> Period.ofMonths((int) number * 3); + case MONTH -> Period.ofMonths((int) number); + case WEEK -> Period.ofWeeks((int) number); + case DAY -> Period.ofDays((int) number); + case HOUR -> Duration.ofHours(number); + case MINUTE -> Duration.ofMinutes(number); + case SECOND -> Duration.ofSeconds(number); + case MILLISECOND -> Duration.ofMillis(number); + case MICROSECOND -> Duration.ofNanos(number * 1000); + case NANOSECOND -> Duration.ofNanos(number); + + default -> throw new UnsupportedOperationException( + "No mapping defined for Calcite TimeUnit: " + unit); + }; + } + + static ExprValue transferTimeToTimestamp( + ExprValue candidate, FunctionProperties functionProperties) { + return new ExprTimestampValue(((ExprTimeValue) candidate).timestampValue(functionProperties)); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeParser.java b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeParser.java new file mode 100644 index 0000000000..08f93f83a0 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/DateTimeParser.java @@ -0,0 +1,131 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.utils.datetime; + +import com.google.common.collect.ImmutableList; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.List; +import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.utils.DateTimeFormatters; + +public interface DateTimeParser { + /** + * Parse a string into a LocalDateTime If only date is found, time is set to 00:00:00. If only + * time is found, date is set to today. 
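A standalone java.time sketch of the two fallbacks just described (date-only input gets midnight, time-only input gets today's UTC date):

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;

class ParseFallbackSketch {
  public static void main(String[] args) {
    // Date-only input: time defaults to 00:00:00
    LocalDateTime fromDate = LocalDate.parse("2020-08-26").atStartOfDay();
    // Time-only input: date defaults to "today" in UTC
    LocalDateTime fromTime = LocalTime.parse("07:01:02").atDate(LocalDate.now(ZoneId.of("UTC")));
    System.out.println(fromDate); // 2020-08-26T00:00
    System.out.println(fromTime); // today's date at 07:01:02
  }
}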
+ * + * @param input A date/time/timestamp string + * @return A LocalDateTime + * @throws IllegalArgumentException if parsing fails + */ + static LocalDateTime parse(String input) { + + if (input == null || input.trim().isEmpty()) { + throw new SemanticCheckException("Cannot parse a null/empty date-time string."); + } + + if (input.contains(":")) { + try { + return parseTimestamp(input); + } catch (Exception ignored) { + } + + try { + LocalTime t = parseTime(input); + return LocalDateTime.of(LocalDate.now(ZoneId.of("UTC")), t); + } catch (Exception ignored) { + } + } else { + try { + LocalDate d = parseDate(input); + return d.atStartOfDay(); + } catch (Exception ignored) { + } + } + throw new SemanticCheckException(String.format("Unable to parse %s as datetime", input)); + } + + static LocalDateTime parseTimeOrTimestamp(String input) { + if (input == null || input.trim().isEmpty()) { + throw new SemanticCheckException("Cannot parse a null/empty date-time string."); + } + + try { + return parseTime(input).atDate(LocalDate.now(ZoneId.of("UTC"))); + } catch (Exception ignored) { + } + + try { + return parseTimestamp(input); + } catch (Exception ignored) { + } + + throw new SemanticCheckException( + String.format("time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", input)); + } + + static LocalDateTime parseDateOrTimestamp(String input) { + if (input == null || input.trim().isEmpty()) { + throw new SemanticCheckException("Cannot parse a null/empty date-time string."); + } + + try { + return parseDate(input).atStartOfDay(); + } catch (Exception ignored) { + } + + try { + return parseTimestamp(input); + } catch (Exception ignored) { + } + + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", input)); + } + + static LocalDateTime parseTimestamp(String input) { + List dateTimeFormatters = + ImmutableList.of(DateTimeFormatters.DATE_TIME_FORMATTER_VARIABLE_NANOS_OPTIONAL); + + for (DateTimeFormatter fmt : dateTimeFormatters) { + try { + return LocalDateTime.parse(input, fmt); + } catch (Exception ignored) { + } + } + throw new SemanticCheckException( + String.format( + "timestamp:%s in unsupported format, please use 'yyyy-MM-dd HH:mm:ss[.SSSSSSSSS]'", + input)); + } + + static LocalTime parseTime(String input) { + List timeFormatters = ImmutableList.of(DateTimeFormatter.ISO_TIME); + for (DateTimeFormatter fmt : timeFormatters) { + try { + return LocalTime.parse(input, fmt); + } catch (Exception ignored) { + } + } + throw new SemanticCheckException( + String.format("time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", input)); + } + + static LocalDate parseDate(String input) { + List dateFormatters = ImmutableList.of(DateTimeFormatter.ISO_DATE); + for (DateTimeFormatter fmt : dateFormatters) { + try { + return LocalDate.parse(input, fmt); + } catch (Exception ignored) { + } + } + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", input)); + } +} diff --git a/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/InstantUtils.java b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/InstantUtils.java new file mode 100644 index 0000000000..866082f202 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/calcite/utils/datetime/InstantUtils.java @@ -0,0 +1,144 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.utils.datetime; + +import java.time.*; +import 
org.apache.calcite.sql.type.SqlTypeName; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValueUtils; +import org.opensearch.sql.data.type.ExprCoreType; +import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.expression.function.FunctionProperties; + +public interface InstantUtils { + + /** + * Convert epoch milliseconds to Instant. + * + * @param epochMillis epoch milliseconds + * @return Instant that represents the given epoch milliseconds + */ + public static Instant fromEpochMills(long epochMillis) { + return Instant.ofEpochMilli(epochMillis); + } + + /** + * Convert internal date to Instant. + * + * @param date internal date in days since epoch + * @return Instant that represents the given date at timezone UTC at 00:00:00 + */ + static Instant fromInternalDate(int date) { + LocalDate localDate = LocalDate.ofEpochDay(date); + return localDate.atStartOfDay(ZoneId.of("UTC")).toInstant(); + } + + /** + * Convert internal time to Instant. + * + * @param time internal time in milliseconds + * @return Instant that represents the current day with the given time at timezone UTC + */ + static Instant fromInternalTime(int time) { + LocalDate todayUtc = LocalDate.now(ZoneId.of("UTC")); + ZonedDateTime startOfDayUtc = todayUtc.atStartOfDay(ZoneId.of("UTC")); + + return startOfDayUtc.toInstant().plus(Duration.ofMillis(time)); + } + + static Instant fromStringExpr(String timestampExpression) { + LocalDateTime datetime = DateTimeParser.parse(timestampExpression); + return datetime.atZone(ZoneId.of("UTC")).toInstant(); + } + + /** + * Convert internal calcite date/time/timestamp to Instant. + * + * @param candidate internal date/time/timestamp. Date is represented as days since epoch, time is + * represented as milliseconds, and timestamp is represented as epoch milliseconds + * @param sqlTypeName type of the internalDatetime + * @return Instant that represents the given internalDatetime + */ + static Instant convertToInstant(Object candidate, SqlTypeName sqlTypeName) { + Instant dateTimeBase = null; + switch (sqlTypeName) { + case DATE: + dateTimeBase = InstantUtils.fromInternalDate((int) candidate); + break; + case TIMESTAMP: + dateTimeBase = InstantUtils.fromEpochMills((long) candidate); + break; + case TIME: + dateTimeBase = InstantUtils.fromInternalTime((int) candidate); + break; + default: + dateTimeBase = InstantUtils.fromStringExpr((String) candidate); + } + return dateTimeBase; + } + + static LocalDateTime parseStringToTimestamp(String input, FunctionProperties functionProperties) { + try { + return parseTimeOrTimestamp(input, functionProperties); + } catch (SemanticCheckException e) { + return parseDateOrTimestamp(input); + } + } + + static LocalDateTime parseTimeOrTimestamp(String input, FunctionProperties functionProperties) { + if (input == null || input.trim().isEmpty()) { + throw new SemanticCheckException("Cannot parse a null/empty date-time string."); + } + + try { + return parseTimestamp(input); + } catch (Exception ignored) { + } + + try { + return parseTime(input, functionProperties); + } catch (Exception ignored) { + } + + throw new SemanticCheckException( + String.format("time:%s in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'", input)); + } + + static LocalDateTime parseDateOrTimestamp(String input) { + if (input == null || input.trim().isEmpty()) { + throw new SemanticCheckException("Cannot parse a null/empty date-time string."); + } + + try { + return parseTimestamp(input); + } catch 
(Exception ignored) { + } + + try { + return parseDate(input); + } catch (Exception ignored) { + } + + throw new SemanticCheckException( + String.format("date:%s in unsupported format, please use 'yyyy-MM-dd'", input)); + } + + static LocalDateTime parseTimestamp(String input) { + return LocalDateTime.ofInstant( + ExprValueUtils.fromObjectValue(input, ExprCoreType.TIMESTAMP).timestampValue(), + ZoneOffset.UTC); + } + + static LocalDateTime parseTime(String input, FunctionProperties functionProperties) { + return LocalDateTime.ofInstant( + (new ExprTimeValue(input)).timestampValue(functionProperties), ZoneOffset.UTC); + } + + static LocalDateTime parseDate(String input) { + return LocalDateTime.ofInstant( + ExprValueUtils.fromObjectValue(input, ExprCoreType.DATE).timestampValue(), ZoneOffset.UTC); + } +} diff --git a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java index 48781df847..b7cd5011f8 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java +++ b/core/src/main/java/org/opensearch/sql/data/model/AbstractExprNumberValue.java @@ -52,4 +52,9 @@ public Double doubleValue() { public int hashCode() { return Objects.hashCode(value); } + + @Override + public boolean isNull() { + return value == null; + } } diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index 5627777863..cfad7e85bc 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -5,6 +5,7 @@ package org.opensearch.sql.data.model; +import static org.opensearch.sql.data.type.ExprCoreType.*; import static org.opensearch.sql.utils.ExpressionUtils.PATH_SEP; import inet.ipaddr.IPAddress; @@ -23,6 +24,7 @@ import java.util.Map; import lombok.experimental.UtilityClass; import org.opensearch.sql.data.type.ExprCoreType; +import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.ExpressionEvaluationException; /** The definition of {@link ExprValue} factory. */ @@ -161,7 +163,7 @@ public static ExprValue fromObjectValue(Object o) { } /** Construct ExprValue from Object with ExprCoreType. */ - public static ExprValue fromObjectValue(Object o, ExprCoreType type) { + public static ExprValue fromObjectValue(Object o, ExprType type) { switch (type) { case TIMESTAMP: return new ExprTimestampValue((String) o); diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java index 10507533bd..66eb31a758 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFormatterUtil.java @@ -32,7 +32,7 @@ * This class converts a SQL style DATE_FORMAT format specifier and converts it to a Java * SimpleDateTime format. 
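A standalone sketch of the specifier translation this class performs, restricted to the %a and %b handlers visible in the map below; the manual replace() calls are illustrative only:

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

class FormatSpecifierSketch {
  public static void main(String[] args) {
    // "%a %b" -> "EEE LLL" per the %a and %b handlers
    String javaPattern = "%a %b".replace("%a", "EEE").replace("%b", "LLL");
    System.out.println(
        LocalDate.of(2020, 8, 26).format(DateTimeFormatter.ofPattern(javaPattern, Locale.ENGLISH)));
    // prints: Wed Aug
  }
}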
*/ -class DateTimeFormatterUtil { +public class DateTimeFormatterUtil { private static final int SUFFIX_SPECIAL_START_TH = 11; private static final int SUFFIX_SPECIAL_END_TH = 13; private static final String SUFFIX_SPECIAL_TH = "th"; @@ -48,7 +48,7 @@ interface DateTimeFormatHandler { String getFormat(LocalDateTime date); } - private static final Map DATE_HANDLERS = + public static final Map DATE_HANDLERS = ImmutableMap.builder() .put("%a", (date) -> "EEE") // %a => EEE - Abbreviated weekday name (Sun..Sat) .put("%b", (date) -> "LLL") // %b => LLL - Abbreviated month name (Jan..Dec) @@ -76,39 +76,46 @@ interface DateTimeFormatHandler { .put( "%D", (date) -> // %w - Day of month with English suffix - String.format("'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) + String.format( + Locale.ROOT, "'%d%s'", date.getDayOfMonth(), getSuffix(date.getDayOfMonth()))) .put( "%f", (date) -> // %f - Microseconds - String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + String.format(Locale.ROOT, NANO_SEC_FORMAT, (date.getNano() / 1000))) .put( "%w", (date) -> // %w - Day of week (0 indexed) - String.format("'%d'", date.getDayOfWeek().getValue())) + String.format(Locale.ROOT, "'%d'", date.getDayOfWeek().getValue())) .put( "%U", (date) -> // %U Week where Sunday is the first day - WEEK() mode 0 - String.format("'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getWeekNumber(0, date.toLocalDate()))) .put( "%u", (date) -> // %u Week where Monday is the first day - WEEK() mode 1 - String.format("'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getWeekNumber(1, date.toLocalDate()))) .put( "%V", (date) -> // %V Week where Sunday is the first day - WEEK() mode 2 used with %X - String.format("'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getWeekNumber(2, date.toLocalDate()))) .put( "%v", (date) -> // %v Week where Monday is the first day - WEEK() mode 3 used with %x - String.format("'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getWeekNumber(3, date.toLocalDate()))) .put( "%X", (date) -> // %X Year for week where Sunday is the first day, 4 digits used with %V - String.format("'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getYearNumber(2, date.toLocalDate()))) .put( "%x", (date) -> // %x Year for week where Monday is the first day, 4 digits used with %v - String.format("'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) + String.format( + Locale.ROOT, "'%d'", CalendarLookup.getYearNumber(3, date.toLocalDate()))) .build(); // Handlers for the time_format function. 
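The Locale.ROOT arguments added throughout these handlers pin down locale-sensitive number formatting; a standalone illustration of the difference they guard against:

import java.util.Locale;

class LocaleFormatSketch {
  public static void main(String[] args) {
    System.out.println(String.format(Locale.ROOT, "%.2f", 3.14));    // 3.14
    System.out.println(String.format(Locale.GERMANY, "%.2f", 3.14)); // 3,14
  }
}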
@@ -140,7 +147,7 @@ interface DateTimeFormatHandler { .put("%y", (date) -> "00") .put("%D", (date) -> null) // %f - Microseconds - .put("%f", (date) -> String.format(NANO_SEC_FORMAT, (date.getNano() / 1000))) + .put("%f", (date) -> String.format(Locale.ROOT, NANO_SEC_FORMAT, (date.getNano() / 1000))) .put("%w", (date) -> null) .put("%U", (date) -> null) .put("%u", (date) -> null) @@ -195,9 +202,20 @@ interface DateTimeFormatHandler { private DateTimeFormatterUtil() {} static StringBuffer getCleanFormat(ExprValue formatExpr) { + return getCleanFormat(formatExpr.stringValue()); + } + + /** + * Cleans the given format string by wrapping characters that are not preceded by a '%' and are + * not part of the allowed date/time format specifiers in single quotes. This ensures that these + * characters are treated as literals in the date/time format. + * + * @param formatStr the format string to be cleaned + * @return a StringBuffer containing the cleaned format string + */ + public static StringBuffer getCleanFormat(String formatStr) { final StringBuffer cleanFormat = new StringBuffer(); - final Matcher m = - CHARACTERS_WITH_NO_MOD_LITERAL_BEHIND_PATTERN.matcher(formatExpr.stringValue()); + final Matcher m = CHARACTERS_WITH_NO_MOD_LITERAL_BEHIND_PATTERN.matcher(formatStr); while (m.find()) { m.appendReplacement(cleanFormat, String.format("'%s'", m.group())); @@ -215,7 +233,7 @@ static StringBuffer getCleanFormat(ExprValue formatExpr) { * @param datetime The datetime argument being formatted * @return A formatted string expression */ - static ExprValue getFormattedString( + public static ExprValue getFormattedString( ExprValue formatExpr, Map handler, LocalDateTime datetime) { StringBuffer cleanFormat = getCleanFormat(formatExpr); @@ -243,6 +261,18 @@ static ExprValue getFormattedString( datetime.format(DateTimeFormatter.ofPattern(format.toString(), Locale.ENGLISH))); } + /** + * Format the datetime using the date format String. + * + * @param datetime the datetime to be formated + * @param formatStr the format of String type. + * @return Date formatted using format and returned as a String. + */ + public static String getFormattedDatetime(LocalDateTime datetime, String formatStr) { + return getFormattedString(new ExprStringValue(formatStr), DATE_HANDLERS, datetime) + .stringValue(); + } + /** * Format the date using the date format String. * @@ -250,12 +280,13 @@ static ExprValue getFormattedString( * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. */ - static ExprValue getFormattedDate(ExprValue dateExpr, ExprValue formatExpr) { + public static ExprValue getFormattedDate(ExprValue dateExpr, ExprValue formatExpr) { final LocalDateTime date = dateExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); return getFormattedString(formatExpr, DATE_HANDLERS, date); } - static ExprValue getFormattedDateOfToday(ExprValue formatExpr, ExprValue time, Clock current) { + public static ExprValue getFormattedDateOfToday( + ExprValue formatExpr, ExprValue time, Clock current) { final LocalDateTime date = LocalDateTime.of(LocalDate.now(current), time.timeValue()); return getFormattedString(formatExpr, DATE_HANDLERS, date); @@ -268,7 +299,7 @@ static ExprValue getFormattedDateOfToday(ExprValue formatExpr, ExprValue time, C * @param formatExpr the format ExprValue of String type. * @return Date formatted using format and returned as a String. 
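A minimal usage sketch of the new public getFormattedDatetime(LocalDateTime, String) overload added in this hunk, exercising only the %a handler; the expected output assumes the English-locale formatting used by getFormattedString:

import java.time.LocalDateTime;
import org.opensearch.sql.expression.datetime.DateTimeFormatterUtil;

class DateFormatUsageSketch {
  public static void main(String[] args) {
    // 2020-08-26 is a Wednesday; %a maps to the abbreviated weekday name
    System.out.println(
        DateTimeFormatterUtil.getFormattedDatetime(LocalDateTime.of(2020, 8, 26, 1, 2, 3), "%a"));
    // prints: Wed
  }
}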
*/ - static ExprValue getFormattedTime(ExprValue timeExpr, ExprValue formatExpr) { + public static ExprValue getFormattedTime(ExprValue timeExpr, ExprValue formatExpr) { // Initializes DateTime with LocalDate.now(). This is safe because the date is ignored. // The time_format function will only return 0 or null for invalid string format specifiers. final LocalDateTime time = LocalDateTime.of(LocalDate.now(), timeExpr.timeValue()); diff --git a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunctions.java b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunctions.java index 79fff9f346..a570b5321f 100644 --- a/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunctions.java +++ b/core/src/main/java/org/opensearch/sql/expression/datetime/DateTimeFunctions.java @@ -27,19 +27,7 @@ import static org.opensearch.sql.expression.function.FunctionDSL.implWithProperties; import static org.opensearch.sql.expression.function.FunctionDSL.nullMissingHandling; import static org.opensearch.sql.expression.function.FunctionDSL.nullMissingHandlingWithProperties; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_FORMATTER_LONG_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_FORMATTER_NO_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_FORMATTER_SHORT_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_FORMATTER_SINGLE_DIGIT_MONTH; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_FORMATTER_SINGLE_DIGIT_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_LONG_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_SHORT_YEAR; -import static org.opensearch.sql.utils.DateTimeFormatters.DATE_TIME_FORMATTER_STRICT_WITH_TZ; -import static org.opensearch.sql.utils.DateTimeFormatters.FULL_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeFormatters.NO_YEAR_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeFormatters.SHORT_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_MONTH_DATE_LENGTH; -import static org.opensearch.sql.utils.DateTimeFormatters.SINGLE_DIGIT_YEAR_DATE_LENGTH; +import static org.opensearch.sql.utils.DateTimeFormatters.*; import static org.opensearch.sql.utils.DateTimeUtils.extractDate; import static org.opensearch.sql.utils.DateTimeUtils.extractTimestamp; @@ -1127,7 +1115,7 @@ private ExprValue dayOfMonthToday(Clock clock) { return new ExprIntegerValue(LocalDateTime.now(clock).getDayOfMonth()); } - private ExprValue dayOfYearToday(Clock clock) { + public static ExprValue dayOfYearToday(Clock clock) { return new ExprIntegerValue(LocalDateTime.now(clock).getDayOfYear()); } @@ -1142,7 +1130,7 @@ private ExprValue weekOfYearToday(ExprValue mode, Clock clock) { * @param clock Current clock taken from function properties * @return ExprValue. */ - private ExprValue dayOfWeekToday(Clock clock) { + public static ExprValue dayOfWeekToday(Clock clock) { return new ExprIntegerValue((formatNow(clock).getDayOfWeek().getValue() % 7) + 1); } @@ -1168,7 +1156,7 @@ private ExprValue exprAddDateInterval( * @param isAdd A flag: true to isAdd, false to subtract. * @return Timestamp calculated. 
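The isAdd flag follows the same plus/minus shape as DateTimeApplyUtils.applyInterval introduced earlier in this patch; a standalone java.time sketch:

import java.time.Duration;
import java.time.Instant;

class ApplyIntervalSketch {
  static Instant applyInterval(Instant base, Duration interval, boolean isAdd) {
    return isAdd ? base.plus(interval) : base.minus(interval);
  }

  public static void main(String[] args) {
    Instant base = Instant.parse("2020-08-26T01:02:03Z");
    System.out.println(applyInterval(base, Duration.ofHours(5), true));  // 2020-08-26T06:02:03Z
    System.out.println(applyInterval(base, Duration.ofHours(5), false)); // 2020-08-25T20:02:03Z
  }
}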
*/ - private ExprValue exprDateApplyInterval( + public static ExprValue exprDateApplyInterval( FunctionProperties functionProperties, ExprValue datetime, TemporalAmount interval, @@ -1264,7 +1252,7 @@ private ExprValue exprApplyTime( * @param temporalDelta A Date/Time/Timestamp object to add time from. * @return A value calculated. */ - private ExprValue exprAddTime( + public static ExprValue exprAddTime( FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, true); } @@ -1278,7 +1266,8 @@ private ExprValue exprAddTime( * @param toTz ExprValue of time zone, representing the time to convert to. * @return Timestamp that has been converted to the to_tz timezone. */ - private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { + public static ExprValue exprConvertTZ( + ExprValue startingDateTime, ExprValue fromTz, ExprValue toTz) { if (startingDateTime.type() == ExprCoreType.STRING) { startingDateTime = exprDateTimeNoTimezone(startingDateTime); } @@ -1311,7 +1300,7 @@ private ExprValue exprConvertTZ(ExprValue startingDateTime, ExprValue fromTz, Ex * @param exprValue ExprValue of Date type or String type. * @return ExprValue. */ - private ExprValue exprDate(ExprValue exprValue) { + public static ExprValue exprDate(ExprValue exprValue) { if (exprValue instanceof ExprStringValue) { return new ExprDateValue(exprValue.stringValue()); } else { @@ -1327,7 +1316,7 @@ private ExprValue exprDate(ExprValue exprValue) { * @param second The second value. * @return The diff. */ - private ExprValue exprDateDiff( + public static ExprValue exprDateDiff( FunctionProperties functionProperties, ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 return new ExprLongValue( @@ -1342,7 +1331,7 @@ private ExprValue exprDateDiff( * @param timeZone ExprValue of String type (or null). * @return ExprValue of date type. */ - private ExprValue exprDateTime(ExprValue timestamp, ExprValue timeZone) { + public static ExprValue exprDateTime(ExprValue timestamp, ExprValue timeZone) { String defaultTimeZone = TimeZone.getDefault().getID(); try { @@ -1383,7 +1372,7 @@ private ExprValue exprDateTime(ExprValue timestamp, ExprValue timeZone) { * @param dateTime ExprValue of String type. * @return ExprValue of date type. */ - private ExprValue exprDateTimeNoTimezone(ExprValue dateTime) { + public static ExprValue exprDateTimeNoTimezone(ExprValue dateTime) { return exprDateTime(dateTime, ExprNullValue.of()); } @@ -1404,7 +1393,7 @@ private ExprValue exprDayName(ExprValue date) { * @param date ExprValue of Date/String/Time/Timestamp type. * @return ExprValue. */ - private ExprValue exprDayOfMonth(ExprValue date) { + public static ExprValue exprDayOfMonth(ExprValue date) { return new ExprIntegerValue(date.dateValue().getDayOfMonth()); } @@ -1414,7 +1403,7 @@ private ExprValue exprDayOfMonth(ExprValue date) { * @param date ExprValue of Date/String/Timstamp type. * @return ExprValue. */ - private ExprValue exprDayOfWeek(ExprValue date) { + public static ExprValue exprDayOfWeek(ExprValue date) { return new ExprIntegerValue((date.dateValue().getDayOfWeek().getValue() % 7) + 1); } @@ -1424,7 +1413,7 @@ private ExprValue exprDayOfWeek(ExprValue date) { * @param date ExprValue of Date/String type. * @return ExprValue. 
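The day-of-week and day-of-year extractions here build on java.time's ISO numbering (Monday=1 .. Sunday=7); a standalone sketch of the (value % 7) + 1 remapping and the day-of-year count:

import java.time.LocalDate;

class DayNumberingSketch {
  public static void main(String[] args) {
    LocalDate d = LocalDate.of(2020, 8, 26); // a Wednesday
    System.out.println(d.getDayOfWeek().getValue());           // 3 (ISO: Monday=1..Sunday=7)
    System.out.println((d.getDayOfWeek().getValue() % 7) + 1); // 4 (Sunday=1..Saturday=7)
    System.out.println(d.getDayOfYear());                      // 239
  }
}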
*/ - private ExprValue exprDayOfYear(ExprValue date) { + public static ExprValue exprDayOfYear(ExprValue date) { return new ExprIntegerValue(date.dateValue().getDayOfYear()); } @@ -1435,8 +1424,8 @@ private ExprValue exprDayOfYear(ExprValue date) { * @param timestamp the date to be formatted as an ExprValue. * @return is a LONG formatted according to the input arguments. */ - public ExprLongValue formatExtractFunction(ExprValue part, ExprValue timestamp) { - String partName = part.stringValue().toUpperCase(); + public static ExprLongValue formatExtractFunction(ExprValue part, ExprValue timestamp) { + String partName = part.stringValue().toUpperCase(Locale.ROOT); LocalDateTime arg = timestamp.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime(); // Override "Week" to use the IsoFields week-of-week-based-year format @@ -1468,7 +1457,7 @@ private ExprValue exprExtract(ExprValue part, ExprValue timestamp) { * @param time The time to be formatted. * @return A LONG */ - private ExprValue exprExtractForTime( + public static ExprValue exprExtractForTime( FunctionProperties functionProperties, ExprValue part, ExprValue time) { return formatExtractFunction( part, new ExprTimestampValue(extractTimestamp(time, functionProperties))); @@ -1480,11 +1469,11 @@ private ExprValue exprExtractForTime( * @param exprValue Day number N. * @return ExprValue. */ - private ExprValue exprFromDays(ExprValue exprValue) { + public static ExprValue exprFromDays(ExprValue exprValue) { return new ExprDateValue(LocalDate.ofEpochDay(exprValue.longValue() - DAYS_0000_TO_1970)); } - private ExprValue exprFromUnixTime(ExprValue time) { + public static ExprValue exprFromUnixTime(ExprValue time) { if (0 > time.doubleValue()) { return ExprNullValue.of(); } @@ -1504,7 +1493,7 @@ private LocalDateTime exprFromUnixTimeImpl(ExprValue time) { .withNano((int) ((time.doubleValue() % 1) * 1E9)); } - private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { + public static ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { var value = exprFromUnixTime(time); if (value.equals(ExprNullValue.of())) { return ExprNullValue.of(); @@ -1519,10 +1508,12 @@ private ExprValue exprFromUnixTimeFormat(ExprValue time, ExprValue format) { * @param format ExprValue of Time/String type * @return ExprValue.. */ - private ExprValue exprGetFormat(ExprValue type, ExprValue format) { - if (formats.contains(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())) { + public static ExprValue exprGetFormat(ExprValue type, ExprValue format) { + if (formats.contains( + type.stringValue().toLowerCase(), format.stringValue().toLowerCase(Locale.ROOT))) { return new ExprStringValue( - formats.get(type.stringValue().toLowerCase(), format.stringValue().toLowerCase())); + formats.get( + type.stringValue().toLowerCase(), format.stringValue().toLowerCase(Locale.ROOT))); } return ExprNullValue.of(); @@ -1534,7 +1525,7 @@ private ExprValue exprGetFormat(ExprValue type, ExprValue format) { * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprHour(ExprValue time) { + public static ExprValue exprHour(ExprValue time) { return new ExprIntegerValue(HOURS.between(LocalTime.MIN, time.timeValue())); } @@ -1555,7 +1546,7 @@ private LocalDate getLastDay(LocalDate today) { * @param timestamp A DATE/TIMESTAMP/STRING ExprValue. * @return An DATE value corresponding to the last day of the month of the given argument. 
*/ - private ExprValue exprLastDay(ExprValue timestamp) { + public static ExprValue exprLastDay(ExprValue timestamp) { return new ExprDateValue(getLastDay(timestamp.dateValue())); } @@ -1565,7 +1556,7 @@ private ExprValue exprLastDay(ExprValue timestamp) { * @param clock The clock for the query start time from functionProperties. * @return An DATE value corresponding to the last day of the month of the given argument. */ - private ExprValue exprLastDayToday(Clock clock) { + public static ExprValue exprLastDayToday(Clock clock) { return new ExprDateValue(getLastDay(formatNow(clock).toLocalDate())); } @@ -1584,7 +1575,7 @@ private ExprValue exprLastDayToday(Clock clock) { * @param dayOfYearExp day of the @year, starting from 1 * @return Date - ExprDateValue object with LocalDate */ - private ExprValue exprMakeDate(ExprValue yearExpr, ExprValue dayOfYearExp) { + public static ExprValue exprMakeDate(ExprValue yearExpr, ExprValue dayOfYearExp) { var year = Math.round(yearExpr.doubleValue()); var dayOfYear = Math.round(dayOfYearExp.doubleValue()); // We need to do this to comply with MySQL @@ -1606,7 +1597,8 @@ private ExprValue exprMakeDate(ExprValue yearExpr, ExprValue dayOfYearExp) { * @param secondExpr second * @return Time - ExprTimeValue object with LocalTime */ - private ExprValue exprMakeTime(ExprValue hourExpr, ExprValue minuteExpr, ExprValue secondExpr) { + public static ExprValue exprMakeTime( + ExprValue hourExpr, ExprValue minuteExpr, ExprValue secondExpr) { var hour = Math.round(hourExpr.doubleValue()); var minute = Math.round(minuteExpr.doubleValue()); var second = secondExpr.doubleValue(); @@ -1615,7 +1607,8 @@ private ExprValue exprMakeTime(ExprValue hourExpr, ExprValue minuteExpr, ExprVal } return new ExprTimeValue( LocalTime.parse( - String.format("%02d:%02d:%012.9f", hour, minute, second), DateTimeFormatter.ISO_TIME)); + String.format(Locale.US, "%02d:%02d:%012.9f", hour, minute, second), + DateTimeFormatter.ISO_TIME)); } /** @@ -1624,7 +1617,7 @@ private ExprValue exprMakeTime(ExprValue hourExpr, ExprValue minuteExpr, ExprVal * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprMicrosecond(ExprValue time) { + public static ExprValue exprMicrosecond(ExprValue time) { return new ExprIntegerValue( TimeUnit.MICROSECONDS.convert(time.timeValue().getNano(), TimeUnit.NANOSECONDS)); } @@ -1635,7 +1628,7 @@ private ExprValue exprMicrosecond(ExprValue time) { * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprMinute(ExprValue time) { + public static ExprValue exprMinute(ExprValue time) { return new ExprIntegerValue((MINUTES.between(LocalTime.MIN, time.timeValue()) % 60)); } @@ -1645,7 +1638,7 @@ private ExprValue exprMinute(ExprValue time) { * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprMinuteOfDay(ExprValue time) { + public static ExprValue exprMinuteOfDay(ExprValue time) { return new ExprIntegerValue(MINUTES.between(LocalTime.MIN, time.timeValue())); } @@ -1655,7 +1648,7 @@ private ExprValue exprMinuteOfDay(ExprValue time) { * @param date ExprValue of Date/String type. * @return ExprValue. */ - private ExprValue exprMonth(ExprValue date) { + public static ExprValue exprMonth(ExprValue date) { return new ExprIntegerValue(date.dateValue().getMonthValue()); } @@ -1695,7 +1688,7 @@ private LocalDate parseDatePeriod(Integer period) { * @param months Amount of months to add. * @return ExprIntegerValue. 
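The period arithmetic just below operates on YYYYMM integers by temporarily appending a day; the same arithmetic sketched standalone with java.time's YearMonth:

import java.time.YearMonth;
import java.time.format.DateTimeFormatter;

class PeriodAddSketch {
  public static void main(String[] args) {
    DateTimeFormatter yyyymm = DateTimeFormatter.ofPattern("uuuuMM");
    YearMonth period = YearMonth.parse("202008", yyyymm);
    System.out.println(period.plusMonths(5).format(yyyymm)); // 202101
  }
}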
*/ - private ExprValue exprPeriodAdd(ExprValue period, ExprValue months) { + public static ExprValue exprPeriodAdd(ExprValue period, ExprValue months) { // We should add a day to make string parsable and remove it afterwards var input = period.integerValue() * 100 + 1; // adds 01 to end of the string var parsedDate = parseDatePeriod(input); @@ -1716,7 +1709,7 @@ private ExprValue exprPeriodAdd(ExprValue period, ExprValue months) { * @param period2 Period in the format YYMM or YYYYMM. * @return ExprIntegerValue. */ - private ExprValue exprPeriodDiff(ExprValue period1, ExprValue period2) { + public static ExprValue exprPeriodDiff(ExprValue period1, ExprValue period2) { var parsedDate1 = parseDatePeriod(period1.integerValue() * 100 + 1); var parsedDate2 = parseDatePeriod(period2.integerValue() * 100 + 1); if (parsedDate1 == null || parsedDate2 == null) { @@ -1731,7 +1724,7 @@ private ExprValue exprPeriodDiff(ExprValue period1, ExprValue period2) { * @param date ExprValue of Date/String type. * @return ExprValue. */ - private ExprValue exprQuarter(ExprValue date) { + public static ExprValue exprQuarter(ExprValue date) { int month = date.dateValue().getMonthValue(); return new ExprIntegerValue((month / 3) + ((month % 3) == 0 ? 0 : 1)); } @@ -1742,7 +1735,7 @@ private ExprValue exprQuarter(ExprValue date) { * @param totalSeconds The total number of seconds * @return A TIME value */ - private ExprValue exprSecToTime(ExprValue totalSeconds) { + public static ExprValue exprSecToTime(ExprValue totalSeconds) { return new ExprTimeValue(LocalTime.MIN.plus(Duration.ofSeconds(totalSeconds.longValue()))); } @@ -1769,7 +1762,7 @@ private long formatNanos(ExprValue seconds) { * @param totalSeconds The total number of seconds * @return A TIME value */ - private ExprValue exprSecToTimeWithNanos(ExprValue totalSeconds) { + public static ExprValue exprSecToTimeWithNanos(ExprValue totalSeconds) { long nanos = formatNanos(totalSeconds); return new ExprTimeValue( @@ -1782,7 +1775,7 @@ private ExprValue exprSecToTimeWithNanos(ExprValue totalSeconds) { * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprSecond(ExprValue time) { + public static ExprValue exprSecond(ExprValue time) { return new ExprIntegerValue((SECONDS.between(LocalTime.MIN, time.timeValue()) % 60)); } @@ -1819,12 +1812,12 @@ private ExprValue exprSubDateInterval( * @param temporalDelta A Date/Time/Timestamp to subtract time from. * @return A value calculated. */ - private ExprValue exprSubTime( + public static ExprValue exprSubTime( FunctionProperties functionProperties, ExprValue temporal, ExprValue temporalDelta) { return exprApplyTime(functionProperties, temporal, temporalDelta, false); } - private ExprValue exprStrToDate( + public static ExprValue exprStrToDate( FunctionProperties fp, ExprValue dateTimeExpr, ExprValue formatStringExp) { return DateTimeFormatterUtil.parseStringWithDateOrTime(fp, dateTimeExpr, formatStringExp); } @@ -1835,7 +1828,7 @@ private ExprValue exprStrToDate( * @param exprValue ExprValue of Time type or String. * @return ExprValue. */ - private ExprValue exprTime(ExprValue exprValue) { + public static ExprValue exprTime(ExprValue exprValue) { if (exprValue instanceof ExprStringValue) { return new ExprTimeValue(exprValue.stringValue()); } else { @@ -1850,7 +1843,7 @@ private ExprValue exprTime(ExprValue exprValue) { * @param second The second value. * @return The diff. 
*/ - private ExprValue exprTimeDiff(ExprValue first, ExprValue second) { + public static ExprValue exprTimeDiff(ExprValue first, ExprValue second) { // java inverses the value, so we have to swap 1 and 2 return new ExprTimeValue( LocalTime.MIN.plus(Duration.between(second.timeValue(), first.timeValue()))); @@ -1862,11 +1855,11 @@ private ExprValue exprTimeDiff(ExprValue first, ExprValue second) { * @param time ExprValue of Time/String type. * @return ExprValue. */ - private ExprValue exprTimeToSec(ExprValue time) { + public static ExprValue exprTimeToSec(ExprValue time) { return new ExprLongValue(time.timeValue().toSecondOfDay()); } - private ExprValue exprTimestampAdd( + public static ExprValue exprTimestampAdd( ExprValue partExpr, ExprValue amountExpr, ExprValue datetimeExpr) { String part = partExpr.stringValue(); int amount = amountExpr.integerValue(); @@ -1909,7 +1902,7 @@ private ExprValue exprTimestampAdd( return new ExprTimestampValue(timestamp.plus(amount, temporalUnit)); } - private ExprValue exprTimestampAddForTimeType( + public static ExprValue exprTimestampAddForTimeType( Clock clock, ExprValue partExpr, ExprValue amountExpr, ExprValue timeExpr) { LocalDateTime datetime = LocalDateTime.of(formatNow(clock).toLocalDate(), timeExpr.timeValue()); return exprTimestampAdd(partExpr, amountExpr, new ExprTimestampValue(datetime)); @@ -1951,7 +1944,7 @@ private ExprValue getTimeDifference(String part, LocalDateTime startTime, LocalD return new ExprLongValue(returnVal); } - private ExprValue exprTimestampDiff( + public static ExprValue exprTimestampDiff( ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), @@ -1959,7 +1952,7 @@ private ExprValue exprTimestampDiff( endTimeExpr.timestampValue().atZone(ZoneOffset.UTC).toLocalDateTime()); } - private ExprValue exprTimestampDiffForTimeType( + public static ExprValue exprTimestampDiffForTimeType( FunctionProperties fp, ExprValue partExpr, ExprValue startTimeExpr, ExprValue endTimeExpr) { return getTimeDifference( partExpr.stringValue(), @@ -1973,7 +1966,7 @@ private ExprValue exprTimestampDiffForTimeType( * @param functionProperties FunctionProperties. * @return ExprValue. */ - private ExprValue exprUtcDate(FunctionProperties functionProperties) { + public static ExprValue exprUtcDate(FunctionProperties functionProperties) { return new ExprDateValue(exprUtcTimeStamp(functionProperties).dateValue()); } @@ -1983,7 +1976,7 @@ private ExprValue exprUtcDate(FunctionProperties functionProperties) { * @param functionProperties FunctionProperties. * @return ExprValue. */ - private ExprValue exprUtcTime(FunctionProperties functionProperties) { + public static ExprValue exprUtcTime(FunctionProperties functionProperties) { return new ExprTimeValue(exprUtcTimeStamp(functionProperties).timeValue()); } @@ -1993,7 +1986,7 @@ private ExprValue exprUtcTime(FunctionProperties functionProperties) { * @param functionProperties FunctionProperties. * @return ExprValue. */ - private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { + public static ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { var zdt = ZonedDateTime.now(functionProperties.getQueryStartClock()) .withZoneSameInstant(ZoneOffset.UTC); @@ -2006,7 +1999,7 @@ private ExprValue exprUtcTimeStamp(FunctionProperties functionProperties) { * @param date ExprValue of Date/String type. * @return ExprValue. 
*/ - private ExprValue exprToDays(ExprValue date) { + public static ExprValue exprToDays(ExprValue date) { return new ExprLongValue(date.dateValue().toEpochDay() + DAYS_0000_TO_1970); } @@ -2016,7 +2009,7 @@ private ExprValue exprToDays(ExprValue date) { * @param date ExprValue of Date/Timestamp/String type. * @return ExprValue. */ - private ExprValue exprToSeconds(ExprValue date) { + public static ExprValue exprToSeconds(ExprValue date) { return new ExprLongValue( date.timestampValue().atOffset(ZoneOffset.UTC).toEpochSecond() + DAYS_0000_TO_1970 * SECONDS_PER_DAY); @@ -2070,7 +2063,7 @@ private DateTimeFormatter getFormatter(int dateAsInt) { * @param dateExpr ExprValue of an Integer/Long formatted for a date (e.g., 950501 = 1995-05-01) * @return ExprValue. */ - private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { + public static ExprValue exprToSecondsForIntType(ExprValue dateExpr) { try { // Attempt to parse integer argument as date LocalDate date = @@ -2092,7 +2085,7 @@ private ExprValue exprToSecondsForIntType(ExprValue dateExpr) { * @param date ExprValue of Date/Timestamp/String type. * @param mode ExprValue of Integer type. */ - private ExprValue exprWeek(ExprValue date, ExprValue mode) { + public static ExprValue exprWeek(ExprValue date, ExprValue mode) { return new ExprIntegerValue( CalendarLookup.getWeekNumber(mode.integerValue(), date.dateValue())); } @@ -2103,15 +2096,15 @@ private ExprValue exprWeek(ExprValue date, ExprValue mode) { * @param date ExprValue of Date/String/Timstamp type. * @return ExprValue. */ - private ExprValue exprWeekday(ExprValue date) { + public static ExprValue exprWeekday(ExprValue date) { return new ExprIntegerValue(date.dateValue().getDayOfWeek().getValue() - 1); } - private ExprValue unixTimeStamp(Clock clock) { + public static ExprValue unixTimeStamp(Clock clock) { return new ExprLongValue(Instant.now(clock).getEpochSecond()); } - private ExprValue unixTimeStampOf(ExprValue value) { + public static ExprValue unixTimeStampOf(ExprValue value) { var res = unixTimeStampOfImpl(value); if (res == null) { return ExprNullValue.of(); @@ -2127,6 +2120,45 @@ private ExprValue unixTimeStampOf(ExprValue value) { return new ExprDoubleValue(res); } + public static Double transferUnixTimeStampFromDoubleInput(Double value) { + var format = new DecimalFormat("0.#"); + format.setMinimumFractionDigits(0); + format.setMaximumFractionDigits(6); + String input = format.format(value); + double fraction = 0; + if (input.contains(".")) { + // Keeping fraction second part and adding it to the result, don't parse it + // Because `toEpochSecond` returns only `long` + // input = 12345.6789 becomes input = 12345 and fraction = 0.6789 + fraction = value - Math.round(Math.ceil(value)); + input = input.substring(0, input.indexOf('.')); + } + try { + var res = LocalDateTime.parse(input, DATE_TIME_FORMATTER_SHORT_YEAR); + return res.toEpochSecond(ZoneOffset.UTC) + fraction; + } catch (DateTimeParseException ignored) { + // nothing to do, try another format + } + try { + var res = LocalDateTime.parse(input, DATE_TIME_FORMATTER_LONG_YEAR); + return res.toEpochSecond(ZoneOffset.UTC) + fraction; + } catch (DateTimeParseException ignored) { + // nothing to do, try another format + } + try { + var res = LocalDate.parse(input, DATE_FORMATTER_SHORT_YEAR); + return res.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; + } catch (DateTimeParseException ignored) { + // nothing to do, try another format + } + try { + var res = LocalDate.parse(input, DATE_FORMATTER_LONG_YEAR); + return 
res.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; + } catch (DateTimeParseException ignored) { + return null; + } + } + private Double unixTimeStampOfImpl(ExprValue value) { // Also, according to MySQL documentation: // The date argument may be a DATE, DATETIME, or TIMESTAMP ... @@ -2139,43 +2171,7 @@ private Double unixTimeStampOfImpl(ExprValue value) { // ... or a number in YYMMDD, YYMMDDhhmmss, YYYYMMDD, or YYYYMMDDhhmmss format. // If the argument includes a time part, it may optionally include a fractional // seconds part. - - var format = new DecimalFormat("0.#"); - format.setMinimumFractionDigits(0); - format.setMaximumFractionDigits(6); - String input = format.format(value.doubleValue()); - double fraction = 0; - if (input.contains(".")) { - // Keeping fraction second part and adding it to the result, don't parse it - // Because `toEpochSecond` returns only `long` - // input = 12345.6789 becomes input = 12345 and fraction = 0.6789 - fraction = value.doubleValue() - Math.round(Math.ceil(value.doubleValue())); - input = input.substring(0, input.indexOf('.')); - } - try { - var res = LocalDateTime.parse(input, DATE_TIME_FORMATTER_SHORT_YEAR); - return res.toEpochSecond(ZoneOffset.UTC) + fraction; - } catch (DateTimeParseException ignored) { - // nothing to do, try another format - } - try { - var res = LocalDateTime.parse(input, DATE_TIME_FORMATTER_LONG_YEAR); - return res.toEpochSecond(ZoneOffset.UTC) + fraction; - } catch (DateTimeParseException ignored) { - // nothing to do, try another format - } - try { - var res = LocalDate.parse(input, DATE_FORMATTER_SHORT_YEAR); - return res.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - } catch (DateTimeParseException ignored) { - // nothing to do, try another format - } - try { - var res = LocalDate.parse(input, DATE_FORMATTER_LONG_YEAR); - return res.toEpochSecond(LocalTime.MIN, ZoneOffset.UTC) + 0d; - } catch (DateTimeParseException ignored) { - return null; - } + return transferUnixTimeStampFromDoubleInput(value.doubleValue()); } } @@ -2196,7 +2192,7 @@ private ExprValue exprWeekWithoutMode(ExprValue date) { * @param date ExprValue of Date/String type. * @return ExprValue. */ - private ExprValue exprYear(ExprValue date) { + public static ExprValue exprYear(ExprValue date) { return new ExprIntegerValue(date.dateValue().getYear()); } @@ -2226,7 +2222,7 @@ private ExprIntegerValue extractYearweek(LocalDate date, int mode) { * @param date ExprValue of Date/Time/Timestamp/String type. * @param mode ExprValue of Integer type. */ - private ExprValue exprYearweek(ExprValue date, ExprValue mode) { + public static ExprValue exprYearweek(ExprValue date, ExprValue mode) { return extractYearweek(date.dateValue(), mode.integerValue()); } @@ -2241,15 +2237,15 @@ private ExprValue exprYearweekWithoutMode(ExprValue date) { return exprYearweek(date, new ExprIntegerValue(0)); } - private ExprValue yearweekToday(ExprValue mode, Clock clock) { + public static ExprValue yearweekToday(ExprValue mode, Clock clock) { return extractYearweek(LocalDateTime.now(clock).toLocalDate(), mode.integerValue()); } - private ExprValue monthOfYearToday(Clock clock) { + public static ExprValue monthOfYearToday(Clock clock) { return new ExprIntegerValue(LocalDateTime.now(clock).getMonthValue()); } - private LocalDateTime formatNow(Clock clock) { + public static LocalDateTime formatNow(Clock clock) { return formatNow(clock, 0); } @@ -2260,7 +2256,7 @@ private LocalDateTime formatNow(Clock clock) { * value includes a fractional seconds part of that many digits. 
* @return LocalDateTime object. */ - private LocalDateTime formatNow(Clock clock, Integer fsp) { + public static LocalDateTime formatNow(Clock clock, Integer fsp) { var res = LocalDateTime.now(clock); var defaultPrecision = 9; // There are 10^9 nanoseconds in one second if (fsp < 0 || fsp > 6) { // Check that the argument is in the allowed range [0, 6] diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteDateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteDateTimeFunctionIT.java index 6f5b560658..604ba7a261 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteDateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteDateTimeFunctionIT.java @@ -5,6 +5,8 @@ package org.opensearch.sql.calcite.remote.fallback; +import java.io.IOException; +import org.junit.Ignore; import org.opensearch.sql.ppl.DateTimeFunctionIT; public class CalciteDateTimeFunctionIT extends DateTimeFunctionIT { @@ -13,4 +15,10 @@ public void init() throws Exception { super.init(); enableCalcite(); } + + @Ignore("https://github.com/opensearch-project/sql/issues/3475") + @Override + public void testTimestampDiff() throws IOException { + super.testTimestampDiff(); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteLikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteLikeQueryIT.java index 9425df21ab..65512b8d01 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteLikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteLikeQueryIT.java @@ -5,15 +5,65 @@ package org.opensearch.sql.calcite.remote.fallback; +import java.io.IOException; import org.junit.Ignore; +import org.junit.Test; import org.opensearch.sql.ppl.LikeQueryIT; // TODO Like function behaviour in V2 is not correct. Remove when it was fixed in V2. 
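As an aside on the DateTimeFunctions changes earlier in this patch: the datetime helpers are widened from private instance methods to public static ones, apparently so that the Calcite datetime UDFs can reuse the expression-layer logic without an instance. The sketch below is illustrative only and is not part of the patch; the wrapper class name and sample inputs are invented, and it assumes the enclosing class is org.opensearch.sql.expression.datetime.DateTimeFunctions as in the upstream repository.

// Illustrative sketch, not part of this patch. Assumes the now-public static
// helpers live in org.opensearch.sql.expression.datetime.DateTimeFunctions;
// the wrapper class name below is hypothetical.
import org.opensearch.sql.data.model.ExprDateValue;
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.expression.datetime.DateTimeFunctions;

public class DateTimeHelperUsageSketch {
  public static void main(String[] args) {
    // A Calcite UDF implementation can now call the helper directly:
    ExprValue dayOfYear = DateTimeFunctions.exprDayOfYear(new ExprDateValue("2038-01-01"));
    System.out.println(dayOfYear.integerValue()); // 1 (first day of the year)

    // The extracted numeric parser tries YYMMDDhhmmss, YYYYMMDDhhmmss, YYMMDD and
    // YYYYMMDD in turn, keeping any fractional-second part; inputs matching none
    // of the formats are expected to yield null.
    Double fromDatetime = DateTimeFunctions.transferUnixTimeStampFromDoubleInput(20380101000000d);
    Double fromDate = DateTimeFunctions.transferUnixTimeStampFromDoubleInput(20380101d);
    Double unparsable = DateTimeFunctions.transferUnixTimeStampFromDoubleInput(1d);
    System.out.println(fromDatetime + " " + fromDate + " " + unparsable);
  }
}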
-@Ignore("https://github.com/opensearch-project/sql/issues/3428") public class CalciteLikeQueryIT extends LikeQueryIT { @Override public void init() throws Exception { super.init(); enableCalcite(); } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_with_escaped_percent() throws IOException, IOException { + super.test_like_with_escaped_percent(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_in_where_with_escaped_underscore() throws IOException { + super.test_like_in_where_with_escaped_underscore(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_on_text_field_with_one_word() throws IOException { + super.test_like_on_text_field_with_one_word(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_on_text_keyword_field_with_one_word() throws IOException { + super.test_like_on_text_keyword_field_with_one_word(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { + super.test_like_on_text_keyword_field_with_greater_than_one_word(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_like_on_text_field_with_greater_than_one_word() throws IOException { + super.test_like_on_text_field_with_greater_than_one_word(); + } + + @Override + @Test + @Ignore("https://github.com/opensearch-project/sql/issues/3428") + public void test_convert_field_text_to_keyword() throws IOException { + super.test_convert_field_text_to_keyword(); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteStatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteStatsCommandIT.java index 352c96fdaf..94d056c85f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteStatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteStatsCommandIT.java @@ -5,6 +5,8 @@ package org.opensearch.sql.calcite.remote.fallback; +import java.io.IOException; +import org.junit.Ignore; import org.opensearch.sql.ppl.StatsCommandIT; public class CalciteStatsCommandIT extends StatsCommandIT { @@ -13,4 +15,10 @@ public void init() throws Exception { super.init(); enableCalcite(); } + + @Ignore("https://github.com/opensearch-project/sql/issues/3495") + @Override + public void testStatsPercentileByNullValue() throws IOException { + super.testStatsPercentileByNullValue(); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteTextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteTextFunctionIT.java index 6ea1cc0e4f..787bc0e61c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteTextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/fallback/CalciteTextFunctionIT.java @@ -5,6 +5,8 @@ package org.opensearch.sql.calcite.remote.fallback; +import java.io.IOException; +import org.junit.Ignore; import org.opensearch.sql.ppl.TextFunctionIT; public class CalciteTextFunctionIT extends TextFunctionIT { @@ -13,4 +15,14 @@ public void init() throws Exception { super.init(); enableCalcite(); } + + 
@Ignore("https://github.com/opensearch-project/sql/issues/3481") + @Override + public void testStrcmp() throws IOException {} + ; + + @Ignore("https://github.com/opensearch-project/sql/issues/3481") + @Override + public void testLocate() throws IOException {} + ; } diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteConvertTZFunctionIT.java index 0a0e240dd3..b14abffe44 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteConvertTZFunctionIT.java @@ -5,10 +5,8 @@ package org.opensearch.sql.calcite.remote.nonfallback; -import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteConvertTZFunctionIT; -@Ignore("https://github.com/opensearch-project/sql/issues/3400") public class NonFallbackCalciteConvertTZFunctionIT extends CalciteConvertTZFunctionIT { @Override public void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeComparisonIT.java index 7a99fcab23..7724e966e3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeComparisonIT.java @@ -5,10 +5,8 @@ package org.opensearch.sql.calcite.remote.nonfallback; -import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteDateTimeComparisonIT; -@Ignore("https://github.com/opensearch-project/sql/issues/3400") public class NonFallbackCalciteDateTimeComparisonIT extends CalciteDateTimeComparisonIT { public NonFallbackCalciteDateTimeComparisonIT( diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeFunctionIT.java index ad2bd268e8..0d4ed321ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeFunctionIT.java @@ -5,14 +5,20 @@ package org.opensearch.sql.calcite.remote.nonfallback; +import java.io.IOException; import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteDateTimeFunctionIT; -@Ignore("https://github.com/opensearch-project/sql/issues/3400") public class NonFallbackCalciteDateTimeFunctionIT extends CalciteDateTimeFunctionIT { @Override public void init() throws Exception { super.init(); disallowCalciteFallback(); } + + @Ignore("https://github.com/opensearch-project/sql/issues/3475") + @Override + public void testTimestampDiff() throws IOException { + super.testTimestampDiff(); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeImplementationIT.java index 46aef794f5..e1de247846 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteDateTimeImplementationIT.java @@ -5,10 +5,8 @@ package org.opensearch.sql.calcite.remote.nonfallback; -import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteDateTimeImplementationIT; -@Ignore("https://github.com/opensearch-project/sql/issues/3400") public class NonFallbackCalciteDateTimeImplementationIT extends CalciteDateTimeImplementationIT { @Override public void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteNowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteNowLikeFunctionIT.java index dda185384c..344f533ab5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteNowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteNowLikeFunctionIT.java @@ -9,10 +9,8 @@ import java.time.temporal.Temporal; import java.util.function.BiFunction; import java.util.function.Supplier; -import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteNowLikeFunctionIT; -@Ignore("https://github.com/opensearch-project/sql/issues/3400") public class NonFallbackCalciteNowLikeFunctionIT extends CalciteNowLikeFunctionIT { public NonFallbackCalciteNowLikeFunctionIT( String name, diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteStatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteStatsCommandIT.java index b0d8dccb64..b1e65dcf13 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteStatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/remote/nonfallback/NonFallbackCalciteStatsCommandIT.java @@ -5,6 +5,8 @@ package org.opensearch.sql.calcite.remote.nonfallback; +import static org.opensearch.sql.util.MatcherUtils.*; + import java.io.IOException; import org.junit.Ignore; import org.opensearch.sql.calcite.remote.fallback.CalciteStatsCommandIT; @@ -17,42 +19,12 @@ public void init() throws Exception { disallowCalciteFallback(); } - @Ignore("Percentile is unsupported in Calcite now") - @Override - public void testStatsPercentile() throws IOException { - super.testStatsPercentile(); - } - - @Ignore("Percentile is unsupported in Calcite now") - @Override - public void testStatsPercentileWithNull() throws IOException { - super.testStatsPercentileWithNull(); - } - - @Ignore("Percentile is unsupported in Calcite now") - @Override - public void testStatsPercentileWithCompression() throws IOException { - super.testStatsPercentileWithCompression(); - } - - @Ignore("Percentile is unsupported in Calcite now") - @Override - public void testStatsPercentileWhere() throws IOException { - super.testStatsPercentileWhere(); - } - - @Ignore("Percentile is unsupported in Calcite now") + @Ignore("https://github.com/opensearch-project/sql/issues/3495") @Override public void testStatsPercentileByNullValue() throws IOException { super.testStatsPercentileByNullValue(); } - @Ignore("Percentile is unsupported in Calcite now") - @Override - public void testStatsPercentileBySpan() throws IOException { - super.testStatsPercentileBySpan(); - } - @Override public void 
testStatsTimeSpan() throws IOException { super.testStatsTimeSpan(); diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLAggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLAggregationIT.java index 868c1da2cd..4742be1bef 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLAggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLAggregationIT.java @@ -601,6 +601,17 @@ public void testTake() { verifyDataRows(actual, rows(List.of("Amber JOHnny", "Hattie"))); } + @Test + public void testPercentile() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | stats percentile(balance, 50) as p50, percentile(balance, 90) as p90", + TEST_INDEX_BANK)); + verifySchema(actual, schema("p50", "long"), schema("p90", "long")); + verifyDataRows(actual, rows(32838, 48086)); + } + @Test public void testSumGroupByNullValue() throws IOException { JSONObject response = diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBasicIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBasicIT.java index e1932b7ce1..8c0cdd79b5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBasicIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBasicIT.java @@ -14,7 +14,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.sql.exception.SemanticCheckException; @@ -494,7 +493,6 @@ public void testNotBetween3() { verifyDataRows(actual, rows("Hattie", 36), rows("Elinor", 36)); } - @Ignore("https://github.com/opensearch-project/sql/issues/3400") public void testDateBetween() { JSONObject actual = executeQuery( diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidIT.java new file mode 100644 index 0000000000..3bf165f9d2 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidIT.java @@ -0,0 +1,1964 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS_WITH_NULL; +import static org.opensearch.sql.util.MatcherUtils.verifyErrorMessageContains; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.legacy.SQLIntegTestCase; + +public class CalcitePPLBuiltinDatetimeFunctionInvalidIT extends CalcitePPLIntegTestCase { + @Override + public void init() throws IOException { + super.init(); + loadIndex(SQLIntegTestCase.Index.STATE_COUNTRY); + loadIndex(SQLIntegTestCase.Index.STATE_COUNTRY_WITH_NULL); + loadIndex(SQLIntegTestCase.Index.DATE_FORMATS); + loadIndex(SQLIntegTestCase.Index.DATE_FORMATS_WITH_NULL); + } + + @Test + public void testYearWeekInvalid() { + SemanticCheckException e = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval `YEARWEEK('2020-08-26')` = YEARWEEK('2020-15-26')", + 
TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e, "unsupported format"); + } + + @Test + public void testYearInvalid() { + SemanticCheckException e = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = YEAR('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e, "unsupported format"); + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = YEAR('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + } + + @Test + public void testWeekInvalid() { + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEK('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEK('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + } + + @Test + public void testTO_SECONDSInvalid() { + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_SECONDS('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_SECONDS('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_SECONDS('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDATEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testTIMEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME('2025-13-02') | fields a", + 
TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAYInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAYNAMEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYNAME('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "Unable to parse"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYNAME('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "Unable to parse"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYNAME('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "Unable to parse"); + } + + @Test + public void testDAYOFMONTHInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFMONTH('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFMONTH('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFMONTH('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + 
verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAY_OF_MONTHInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_MONTH('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_MONTH('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_MONTH('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAYOFWEEKInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + + SemanticCheckException e4 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e4, "unsupported format"); + + SemanticCheckException e5 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e5, "unsupported format"); + + SemanticCheckException e6 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFWEEK('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e6, "unsupported format"); + } + + @Test + public void testDAY_OF_WEEKInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_WEEK('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_WEEK('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + 
verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_WEEK('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAYOFYEARInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFYEAR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFYEAR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAYOFYEAR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDAY_OF_YEARInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_YEAR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_YEAR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DAY_OF_YEAR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testHOURInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testHOUR_OF_DAYInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR_OF_DAY('2025-13-02') | fields a", + 
TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR_OF_DAY('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=HOUR_OF_DAY('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testLAST_DAYInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=LAST_DAY('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=LAST_DAY('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=LAST_DAY('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMINUTEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMINUTE_OF_DAYInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_DAY('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_DAY('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_DAY('2025-12-01 15:02:61') | fields a", + 
TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMINUTE_OF_HOURInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_HOUR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_HOUR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MINUTE_OF_HOUR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMONTHInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMONTH_OF_YEARInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH_OF_YEAR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH_OF_YEAR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTH_OF_YEAR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testMONTHNAMEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTHNAME('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "Unable to parse"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + 
"source=%s | eval a=MONTHNAME('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "Unable to parse"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=MONTHNAME('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "Unable to parse"); + } + + @Test + public void testQUARTERInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=QUARTER('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=QUARTER('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=QUARTER('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testSECONDInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testSECOND_OF_MINUTEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND_OF_MINUTE('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND_OF_MINUTE('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SECOND_OF_MINUTE('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testTIME_TO_SECInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject 
actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_TO_SEC('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_TO_SEC('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_TO_SEC('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testTIMESTAMPInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + + SemanticCheckException e4 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e4, "unsupported format"); + + SemanticCheckException e5 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e5, "unsupported format"); + + SemanticCheckException e6 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e6, "unsupported format"); + + SemanticCheckException e7 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e7, "unsupported format"); + + SemanticCheckException e8 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e8, "unsupported format"); + + SemanticCheckException e9 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMP('2025-12-01 
15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e9, "unsupported format"); + } + + @Test + public void testTO_DAYSInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_DAYS('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_DAYS('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TO_DAYS('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testYEARInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEAR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEAR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEAR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testWEEKInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEK('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEK('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEK('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testWEEK_OF_YEARInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEK_OF_YEAR('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s 
| eval a=WEEK_OF_YEAR('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEK_OF_YEAR('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testWEEKDAYInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEKDAY('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEKDAY('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=WEEKDAY('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testYEARWEEKInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEARWEEK('2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEARWEEK('16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=YEARWEEK('2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testADDDATEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDDATE('2025-13-02', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDDATE('16:00:61', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDDATE('2025-12-01 15:02:61', INTERVAL 1 HOUR) |" + + " fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + + SemanticCheckException e4 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = +
executeQuery( + String.format( + "source=%s | eval a=ADDDATE('2025-13-02', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e4, "unsupported format"); + + SemanticCheckException e5 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDDATE('16:00:61', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e5, "unsupported format"); + + SemanticCheckException e6 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDDATE('2025-12-01 15:02:61', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e6, "unsupported format"); + } + + @Test + public void testADDTIMEInvalid() { + + IllegalArgumentException e1 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDTIME('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "Unsupported type: "); + + IllegalArgumentException e2 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDTIME('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "Unsupported type: "); + + IllegalArgumentException e3 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=ADDTIME('2025-12-01 15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "Unsupported type: "); + } + + @Test + public void testDATE_ADDInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_ADD('2025-13-02', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_ADD('16:00:61', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_ADD('2025-12-01 15:02:61', INTERVAL 1 HOUR) |" + + " fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDATE_SUBInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_SUB('2025-13-02', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_SUB('16:00:61', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, 
"unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_SUB('2025-12-01 15:02:61', INTERVAL 1 HOUR) |" + + " fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDATEDIFFInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATEDIFF('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATEDIFF('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATEDIFF('2025-12-01 15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testSUBDATEInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('2025-13-02', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('16:00:61', INTERVAL 1 HOUR) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('2025-12-01 15:02:61', INTERVAL 1 HOUR) |" + + " fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + + SemanticCheckException e4 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('2025-13-02', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e4, "unsupported format"); + + SemanticCheckException e5 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('16:00:61', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e5, "unsupported format"); + + SemanticCheckException e6 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBDATE('2025-12-01 15:02:61', 1) | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e6, "unsupported format"); + } + + @Test + public void testSUBTIMEInvalid() { + IllegalArgumentException e1 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval 
a=SUBTIME('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "Unsupported type:"); + + IllegalArgumentException e2 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBTIME('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "Unsupported type:"); + + IllegalArgumentException e3 = + assertThrows( + IllegalArgumentException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=SUBTIME('2025-12-01 15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "Unsupported type:"); + } + + @Test + public void testTIMESTAMPADDInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPADD(HOUR, 1, '2025-13-02') | fields" + + " a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPADD(HOUR, 1, '16:00:61') | fields" + " a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPADD(HOUR, 1, '2025-12-01 15:02:61')" + + " | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testTIMESTAMPDIFFInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPDIFF(HOUR, '2025-13-02'," + + " '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPDIFF(HOUR, '16:00:61', '16:00:61')" + + " | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIMESTAMPDIFF(HOUR, '2025-12-01 15:02:61'," + + " '2025-12-01 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testDATE_FORMATInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_FORMAT('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_FORMAT('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); 
+ }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=DATE_FORMAT('2025-12-01 15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } + + @Test + public void testTIME_FORMATInvalid() { + + SemanticCheckException e1 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_FORMAT('2025-13-02', '2025-13-02') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e1, "unsupported format"); + + SemanticCheckException e2 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_FORMAT('16:00:61', '16:00:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e2, "unsupported format"); + + SemanticCheckException e3 = + assertThrows( + SemanticCheckException.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a=TIME_FORMAT('2025-12-01 15:02:61', '2025-12-01" + + " 15:02:61') | fields a", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + verifyErrorMessageContains(e3, "unsupported format"); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidPushdownIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidPushdownIT.java new file mode 100644 index 0000000000..fd141ddd90 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinDatetimeFunctionInvalidPushdownIT.java @@ -0,0 +1,16 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import org.opensearch.sql.common.setting.Settings; + +public class CalcitePPLBuiltinDatetimeFunctionInvalidPushdownIT + extends CalcitePPLBuiltinDatetimeFunctionInvalidIT { + @Override + protected Settings getSettings() { + return enablePushdown(); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionIT.java index 7a81d12492..c5a03757a8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionIT.java @@ -2,6 +2,7 @@ * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 */ + package org.opensearch.sql.calcite.standalone; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATATYPE_NUMERIC; @@ -17,7 +18,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; import org.junit.jupiter.api.Test; public class CalcitePPLBuiltinFunctionIT extends CalcitePPLIntegTestCase { @@ -130,7 +130,6 @@ public void testTypeOfBasic() { result, rows("INT", "BOOLEAN", "DOUBLE", "STRING", "STRING", "STRING", "INT", "INTERVAL")); } - @Ignore("https://github.com/opensearch-project/sql/issues/3400") public void testTypeOfDateTime() { JSONObject result = executeQuery( diff --git 
a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullIT.java new file mode 100644 index 0000000000..b2ed6696e2 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullIT.java @@ -0,0 +1,1012 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS_WITH_NULL; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_NULL_MISSING; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_STATE_COUNTRY_WITH_NULL; +import static org.opensearch.sql.util.MatcherUtils.*; +import static org.opensearch.sql.util.MatcherUtils.rows; + +import java.io.IOException; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Ignore; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.exception.SemanticCheckException; + +public class CalcitePPLBuiltinFunctionsNullIT extends CalcitePPLIntegTestCase { + @Override + public void init() throws IOException { + super.init(); + loadIndex(Index.STATE_COUNTRY); + loadIndex(Index.STATE_COUNTRY_WITH_NULL); + loadIndex(Index.DATE_FORMATS); + loadIndex(Index.DATE_FORMATS_WITH_NULL); + loadIndex(Index.NULL_MISSING); + } + + @Test + public void testYearWeekInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval `YEARWEEK('2020-08-26')` = YEARWEEK('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testYearWeekNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval NullValue = YEARWEEK(date) | fields NullValue", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema(actual, schema("NullValue", "integer")); + JSONArray ret = actual.getJSONArray("datarows"); + for (int i = 0; i < ret.length(); i++) { + Object o = ((JSONArray) ret.get(i)).get(0); + assertEquals(JSONObject.NULL, o); + } + } + + @Test + public void testYearInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = YEAR('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = YEAR('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testWeekInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEK('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEK('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testWeekDayInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEKDAY('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEKDAY('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + + assertThrows( + Exception.class, + () -> { + 
JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = WEEKDAY('25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testWeekDayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval timestamp = WEEKDAY(strict_date_optional_time)," + + " date=WEEKDAY(date), time=WEEKDAY(time) | fields timestamp, date, time", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema( + actual, + schema("timestamp", "integer"), + schema("date", "integer"), + schema("time", "integer")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testUnixTimestampInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = UNIX_TIMESTAMP('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = UNIX_TIMESTAMP('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testUnixTimestampNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval timestamp = UNIX_TIMESTAMP(strict_date_optional_time)," + + " date=UNIX_TIMESTAMP(date), time=UNIX_TIMESTAMP(time) | fields timestamp," + + " date, time", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema( + actual, schema("timestamp", "double"), schema("date", "double"), schema("time", "double")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testToSecondsInvalid() { + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = UNIX_TIMESTAMP('2020-15-26')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + assertThrows( + Exception.class, + () -> { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a = UNIX_TIMESTAMP('2020-12-26 25:00:00')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testToSecondsNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval timestamp = SECOND(strict_date_optional_time)," + + " date=SECOND(date) | fields timestamp, date", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema(actual, schema("timestamp", "integer"), schema("date", "integer")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testDatetimeInvalid() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval timestamp = DATETIME('2025-12-01 15:02:61')," + + " date=DATETIME('2025-12-02'), time=DATETIME('16:00:61'), convert1=" + + " DATETIME('2025-12-01 12:02:61') | fields timestamp, date, time, convert1", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema( + actual, + schema("timestamp", "timestamp"), + schema("date", "timestamp"), + schema("time", "timestamp"), + schema("convert1", "timestamp")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testStrTDateInvalid1() { + assertThrows( + Exception.class, + () -> { + JSONObject 
actual = + executeQuery( + String.format( + "source=%s | eval a = str_to_date('01,13,2013', '%%d,%%m,%%Y')", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + }); + } + + @Test + public void testStrTDateInvalid2() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval timestamp = STR_TO_DATE('2025-13-02', '2025-13-02')" + + "| fields timestamp", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema(actual, schema("timestamp", "timestamp")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testConvertTZInvalid() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a =CONVERT_TZ('2025-13-02', '+10:00', '-10:00'), b" + + " =CONVERT_TZ('2025-10-02', '+10:00', '-10:00'), c =CONVERT_TZ('2025-12-02" + + " 10:61:61', '+10:00', '-10:00'), d = CONVERT_TZ('2025-12-02 12:61:61'," + + " '+10:00:00', '-10:00')| fields a, b, c, d", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema( + actual, + schema("a", "timestamp"), + schema("b", "timestamp"), + schema("c", "timestamp"), + schema("d", "timestamp")); + JSONArray ret = (JSONArray) actual.getJSONArray("datarows").get(0); + for (int i = 0; i < ret.length(); i++) { + assertEquals(JSONObject.NULL, ret.get(i)); + } + } + + @Test + public void testAddSubDateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval n1 = ADDDATE(date_time, INTERVAL 1 DAY), " + + "n2 = ADDDATE(date, 1), n3 = SUBDATE(time, 1) | fields n1, n2, n3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema( + actual, schema("n1", "timestamp"), schema("n2", "date"), schema("n3", "timestamp")); + verifyDataRows(actual, rows(null, null, null)); + } + + /** + * (DATE/TIMESTAMP, DATE/TIMESTAMP/TIME) -> TIMESTAMP + * + *
<p>
(TIME, DATE/TIMESTAMP/TIME) -> TIME + */ + @Test + public void testAddTimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval n1 = ADDTIME(date_time, date_time), " + + "n2 = ADDTIME(date, date), n3 = ADDTIME(time, time) | fields n1, n2, n3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema( + actual, schema("n1", "timestamp"), schema("n2", "timestamp"), schema("n3", "time")); + verifyDataRows(actual, rows(null, null, null)); + } + + /** (DATE/TIMESTAMP/TIME, INTERVAL) -> TIMESTAMP */ + @Test + public void testDateAddSubNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval n1 = DATE_ADD(date_time, INTERVAL 1 DAY), n2 = DATE_ADD(date," + + " INTERVAL 1 DAY), n3 = DATE_SUB(time, INTERVAL 1 DAY) | fields n1, n2, n3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema( + actual, schema("n1", "timestamp"), schema("n2", "timestamp"), schema("n3", "timestamp")); + verifyDataRows(actual, rows(null, null, null)); + } + + /* + STRING/DATE/TIMESTAMP + */ + @Test + public void testDateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DATE(date), d2 = DATE(date_time) | fields d1, d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + + verifySchema(actual, schema("d1", "date"), schema("d2", "date")); + + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDateInvalid() { + Exception semanticException = + assertThrows( + SemanticCheckException.class, + () -> + executeQuery( + String.format( + "source=%s | eval d1 = DATE('2020-08-26'), d2 = DATE('2020-15-26') |" + + " fields d1, d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL))); + verifyErrorMessageContains( + semanticException, "date:2020-15-26 in unsupported format, please use 'yyyy-MM-dd'"); + } + + /** STRING/TIME/TIMESTAMP -> INTEGER */ + @Test + public void testHourNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval h2 = HOUR(date_time), h3 = HOUR(time) | fields h2, h3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("h2", "integer"), schema("h3", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testHourInvalid() { + Exception semanticException = + assertThrows( + SemanticCheckException.class, + () -> + executeQuery( + String.format( + "source=%s | eval h1 = HOUR('2020-08-26') | fields h1", + TEST_INDEX_DATE_FORMATS_WITH_NULL))); + verifyErrorMessageContains( + semanticException, + "time:2020-08-26 in unsupported format, please use 'HH:mm:ss[.SSSSSSSSS]'"); + } + + @Test + public void testDayInvalid() { + Exception malformMonthException = + assertThrows( + SemanticCheckException.class, + () -> + executeQuery( + String.format( + "source=%s | eval d1 = DAY('2020-13-26') | fields d1", + TEST_INDEX_DATE_FORMATS_WITH_NULL))); + verifyErrorMessageContains( + malformMonthException, "date:2020-13-26 in unsupported format, please use 'yyyy-MM-dd'"); + + Exception dateAsTimeException = + assertThrows( + SemanticCheckException.class, + () -> + executeQuery( + String.format( + "source=%s | eval d2 = DAY('12:00:00') | fields d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL))); + verifyErrorMessageContains( + dateAsTimeException, "date:12:00:00 in unsupported format, please use 'yyyy-MM-dd'"); + } + + @Test + public void testTimeInvalid() { + assertThrows( + SemanticCheckException.class, + () -> + executeQuery( + String.format( + "source=%s | eval t1 = TIME('13:69:00') | fields t1", + TEST_INDEX_DATE_FORMATS_WITH_NULL))); + } + + @Test + public void testDayOfWeekNull() 
{ + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DAY_OF_WEEK(date), d2 = DAYOFWEEK(date_time) | fields d1," + + " d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "integer"), schema("d2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDayOfYearNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DAY_OF_YEAR(date), d2 = DAYOFYEAR(date_time) | fields d1," + + " d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "integer"), schema("d2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testExtractNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval e1 = EXTRACT(YEAR FROM date), e2 = EXTRACT(MONTH FROM date_time)," + + " e3 = EXTRACT(HOUR FROM time) | fields e1, e2, e3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("e1", "long"), schema("e2", "long"), schema("e3", "long")); + verifyDataRows(actual, rows(null, null, null)); + } + + @Test + public void testFromDaysNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval from1 = FROM_DAYS(TO_DAYS(date)), from2 =" + + " FROM_DAYS(TO_DAYS(date_time)) | fields from1, from2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("from1", "date"), schema("from2", "date")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testFromUnixtimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval f1 = FROM_UNIXTIME(UNIX_TIMESTAMP(date_time)), f2 =" + + " FROM_UNIXTIME(UNIX_TIMESTAMP(date)) | fields f1, f2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("f1", "timestamp"), schema("f2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testHourOfDayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval h1 = HOUR_OF_DAY(time), h2 = HOUR_OF_DAY(date_time) | fields h1," + + " h2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("h1", "integer"), schema("h2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testLastDayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval l1 = LAST_DAY(date), l2 = LAST_DAY(date_time) | fields l1, l2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("l1", "date"), schema("l2", "date")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMakedateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mk1 = MAKEDATE(YEAR(date), DAYOFYEAR(date)), mk2 =" + + " MAKEDATE(YEAR(date_time), DAYOFYEAR(date_time)) | fields mk1, mk2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mk1", "date"), schema("mk2", "date")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMaketimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mt1 = MAKETIME(HOUR(date_time), MINUTE(date_time)," + + " SECOND(date_time)), mt2 = MAKETIME(HOUR(time), MINUTE(time), SECOND(time))" + + " | fields mt1, mt2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mt1", "time"), schema("mt2", "time")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testAdddateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval a1 = 
ADDDATE(date, 3), a2 = ADDDATE(date_time, 3) | fields a1," + + " a2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("a1", "date"), schema("a2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testAddtimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval n1 = ADDTIME(date_time, date_time), n2 = ADDTIME(date, date), n3" + + " = ADDTIME(time, time) | fields n1, n2, n3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema( + actual, schema("n1", "timestamp"), schema("n2", "timestamp"), schema("n3", "time")); + verifyDataRows(actual, rows(null, null, null)); + } + + @Test + public void testConvertTzNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval c1 = CONVERT_TZ(date, '+00:00', '+08:00'), c2 = CONVERT_TZ(date," + + " '-03:00', '+01:30') | fields c1, c2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("c1", "timestamp"), schema("c2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDateAddNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval da1 = DATE_ADD(date, INTERVAL 1 DAY), da2 = DATE_ADD(date_time," + + " interval 5 month) | fields da1, da2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("da1", "timestamp"), schema("da2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDateFormatNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval df1 = DATE_FORMAT(date, 'yyyy-MM-dd'), df2 =" + + " DATE_FORMAT(date_time, 'yyyy-MM-dd HH:mm:ss') | fields df1, df2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("df1", "string"), schema("df2", "string")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDateSubNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval ds1 = DATE_SUB(date, INTERVAL 1 DAY), ds2 = DATE_SUB(date_time," + + " interval 5 month) | fields ds1, ds2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("ds1", "timestamp"), schema("ds2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDatediffNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval diff1 = DATEDIFF(date, date), diff2 = DATEDIFF(date_time," + + " date_time) | fields diff1, diff2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("diff1", "long"), schema("diff2", "long")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDatetimeNullString() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where age = 10 | eval d1 = DATETIME(name, '+10:00'), d2 =" + + " datetime('2004-02-28 23:00:00-10:00', state)| fields d1, d2", + TEST_INDEX_STATE_COUNTRY_WITH_NULL)); + verifySchema(actual, schema("d1", "timestamp"), schema("d2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDatetimeNullTimestamp() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DATETIME(date_time) | fields d1", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "timestamp")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testDayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DAY(date), d2 = DAY(date_time) | fields d1, d2", + 
TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "integer"), schema("d2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDaynameNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DAYNAME(date), d2 = DAYNAME(date_time) | fields d1, d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "string"), schema("d2", "string")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testDayOfMonthNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval d1 = DAY_OF_MONTH(date), d2 = DAYOFMONTH(date_time) | fields d1," + + " d2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("d1", "integer"), schema("d2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Ignore + @Test + public void testMicrosecondNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval m1 = MICROSECOND(time), m2 = MICROSECOND(date_time) | fields m1," + + " m2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("m1", "integer"), schema("m2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMinuteNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval m1 = MINUTE(time), m2 = MINUTE(date_time) | fields m1, m2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("m1", "integer"), schema("m2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMinuteOfDayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval md1 = MINUTE_OF_DAY(time), md2 = MINUTE_OF_DAY(date_time) |" + + " fields md1, md2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("md1", "integer"), schema("md2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMinuteOfHourNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mh1 = MINUTE_OF_HOUR(time), mh2 = MINUTE_OF_HOUR(date_time) |" + + " fields mh1, mh2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mh1", "integer"), schema("mh2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMonthNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mo1 = MONTH(date), mo2 = MONTH(date_time) | fields mo1, mo2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mo1", "integer"), schema("mo2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMonthOfYearNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mo1 = MONTH_OF_YEAR(date), mo2 = MONTH_OF_YEAR(date_time) |" + + " fields mo1, mo2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mo1", "integer"), schema("mo2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testMonthnameNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval mn1 = MONTHNAME(date), mn2 = MONTHNAME(date_time) | fields mn1," + + " mn2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("mn1", "string"), schema("mn2", "string")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testPeriodAddDiffNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where key='null' | head 1 | eval pa1 = 
PERIOD_ADD(`int`, 3), pa2 =" + + " PERIOD_DIFF(`int`, `int`) | fields pa1, pa2", + TEST_INDEX_NULL_MISSING)); + + verifySchema(actual, schema("pa1", "integer"), schema("pa2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testQuarterNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval q1 = QUARTER(date), q2 = QUARTER(date_time) | fields q1, q2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("q1", "integer"), schema("q2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testSecToTimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval st1 = SEC_TO_TIME(UNIX_TIMESTAMP(date_time)), st2 =" + + " SEC_TO_TIME(UNIX_TIMESTAMP(date)) | fields st1, st2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("st1", "time"), schema("st2", "time")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testSecondNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval s1 = SECOND(time), s2 = SECOND(date_time) | fields s1, s2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("s1", "integer"), schema("s2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testSecondOfMinuteNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval s1 = SECOND_OF_MINUTE(time), s2 = SECOND_OF_MINUTE(date_time) |" + + " fields s1, s2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("s1", "integer"), schema("s2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testStrToDateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval s = STR_TO_DATE(MONTHNAME(date_time), '%%M') | fields s", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("s", "timestamp")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testSubdateNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval sd1 = SUBDATE(date, 3), sd2 = SUBDATE(date_time, 5) | fields sd1," + + " sd2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("sd1", "date"), schema("sd2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testSubtimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval s1 = SUBTIME(date_time, date_time), s2 = SUBTIME(date, date), s3" + + " = SUBTIME(time, time) | fields s1, s2, s3", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema( + actual, schema("s1", "timestamp"), schema("s2", "timestamp"), schema("s3", "time")); + verifyDataRows(actual, rows(null, null, null)); + } + + @Test + public void testTimeNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval t1 = TIME(date_time) | fields t1", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("t1", "time")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testTimeFormatNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval tf1 = TIME_FORMAT(time, '%%H:%%i:%%s') | fields tf1", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("tf1", "string")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testTimeToSecNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval ts1 = TIME_TO_SEC(time) | fields ts1", 
+ TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("ts1", "long")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testTimediffNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval td1 = TIMEDIFF(time, time) | fields td1", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("td1", "time")); + verifyDataRows(actual, rows((Object) null)); + } + + @Test + public void testTimestampNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval t1 = TIMESTAMP(date, time), t2 = TIMESTAMP(date_time) | fields" + + " t1, t2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("t1", "timestamp"), schema("t2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testTimestampaddNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval ta1 = TIMESTAMPADD(MONTH, 2, date), ta2 = TIMESTAMPADD(HOUR, 3," + + " date_time) | fields ta1, ta2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("ta1", "timestamp"), schema("ta2", "timestamp")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testTimestampdiffNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval td1 = TIMESTAMPDIFF(DAY, date, date_time), td2 =" + + " TIMESTAMPDIFF(HOUR, date_time, date_time) | fields td1, td2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("td1", "long"), schema("td2", "long")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testToDaysNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval td1 = TO_DAYS(date), td2 = TO_DAYS(date_time) | fields td1, td2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("td1", "long"), schema("td2", "long")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testWeekNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval w1 = WEEK(date), w2 = WEEK(date_time) | fields w1, w2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("w1", "integer"), schema("w2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testWeekdayNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval wd1 = WEEKDAY(date), wd2 = WEEKDAY(date_time) | fields wd1, wd2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("wd1", "integer"), schema("wd2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testWeekOfYearNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval wy1 = WEEK_OF_YEAR(date), wy2 = WEEK_OF_YEAR(date_time) | fields" + + " wy1, wy2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("wy1", "integer"), schema("wy2", "integer")); + verifyDataRows(actual, rows(null, null)); + } + + @Test + public void testYearNull() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval y1 = YEAR(date), y2 = YEAR(date_time) | fields y1, y2", + TEST_INDEX_DATE_FORMATS_WITH_NULL)); + verifySchema(actual, schema("y1", "integer"), schema("y2", "integer")); + verifyDataRows(actual, rows(null, null)); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullPushdownIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullPushdownIT.java 
new file mode 100644 index 0000000000..2a22b50cc5 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLBuiltinFunctionsNullPushdownIT.java @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import org.opensearch.sql.common.setting.Settings; + +public class CalcitePPLBuiltinFunctionsNullPushdownIT extends CalcitePPLBuiltinFunctionsNullIT { + @Override + protected Settings getSettings() { + return enablePushdown(); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionIT.java new file mode 100644 index 0000000000..3a1e6757aa --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionIT.java @@ -0,0 +1,1428 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.exprYearweek; +import static org.opensearch.sql.expression.datetime.DateTimeFunctions.formatNow; +import static org.opensearch.sql.legacy.TestsConstants.*; +import static org.opensearch.sql.util.MatcherUtils.*; +import static org.opensearch.sql.util.MatcherUtils.rows; + +import java.io.IOException; +import java.sql.Date; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.Month; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.TextStyle; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Locale; +import org.hamcrest.Matchers; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; +import org.opensearch.client.Request; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.expression.function.FunctionProperties; + +public class CalcitePPLDateTimeBuiltinFunctionIT extends CalcitePPLIntegTestCase { + @Override + public void init() throws IOException { + super.init(); + loadIndex(Index.STATE_COUNTRY); + loadIndex(Index.STATE_COUNTRY_WITH_NULL); + loadIndex(Index.DATE_FORMATS); + loadIndex(Index.BANK_WITH_NULL_VALUES); + loadIndex(Index.DATE); + loadIndex(Index.PEOPLE2); + loadIndex(Index.BANK); + initRelativeDocs(); + } + + private static String getFormattedLocalDate() { + return LocalDateTime.now(ZoneId.systemDefault()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + } + + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", + TEST_INDEX_DATE, type, date, format)); + verifySchema(result, schema("f", null, "string")); + verifySome(result.getJSONArray("datarows"), rows(formatted)); + + result = + executeQuery( + String.format( + "source=%s | eval f = date_format('%s', '%s') | fields f", + TEST_INDEX_DATE, date, format)); + verifySchema(result, schema("f", null, "string")); + verifySome(result.getJSONArray("datarows"), rows(formatted)); + } + + @Test + public void testDate() { + JSONObject actual = + executeQuery( + String.format( + 
"source=%s | eval `DATE('2020-08-26')` = DATE('2020-08-26') | eval" + + " `DATE(TIMESTAMP('2020-08-26 13:49:00'))` = DATE(TIMESTAMP('2020-08-26" + + " 13:49:00')) | eval `DATE('2020-08-26 13:49')` = DATE('2020-08-26 13:49') " + + "| eval d = DATE(strict_date_time)" + + "| fields `DATE('2020-08-26')`, `DATE(TIMESTAMP('2020-08-26 13:49:00'))`," + + " `DATE('2020-08-26 13:49')`, d | head 1", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("DATE('2020-08-26')", "date"), + schema("DATE(TIMESTAMP('2020-08-26 13:49:00'))", "date"), + schema("DATE('2020-08-26 13:49')", "date"), + schema("d", "date")); + + verifyDataRows( + actual, + rows( + Date.valueOf("2020-08-26"), + Date.valueOf("2020-08-26"), + Date.valueOf("2020-08-26"), + Date.valueOf("1984-04-12"))); + } + + @Test + public void testTimestamp() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval `TIMESTAMP('2020-08-26 13:49:00')` =" + + " TIMESTAMP('2020-08-26 13:49:00')| eval `TIMESTAMP(DATE('2020-08-26" + + " 13:49:00'))` = TIMESTAMP(DATE('2020-08-26 13:49:00'))| eval" + + " `TIMESTAMP(TIMESTAMP('2020-08-26 13:49:00'))` =" + + " TIMESTAMP(TIMESTAMP('2020-08-26 13:49:00'))| eval" + + " `TIMESTAMP(TIME('2020-08-26 13:49:00'))` = TIMESTAMP(TIME('2020-08-26" + + " 13:49:00'))| eval `TIMESTAMP('2020-08-26 13:49:00', 2020-08-26 00:10:10)` =" + + " TIMESTAMP('2020-08-26 13:49:00', '2020-08-26 00:10:10')| eval" + + " `TIMESTAMP('2020-08-26 13:49:00', TIMESTAMP(2020-08-26 00:10:10))` =" + + " TIMESTAMP('2020-08-26 13:49:00', TIMESTAMP('2020-08-26 00:10:10'))| eval" + + " `TIMESTAMP('2020-08-26 13:49:00', DATE(2020-08-26 00:10:10))` =" + + " TIMESTAMP('2020-08-26 13:49:00', DATE('2020-08-26 00:10:10'))| eval" + + " `TIMESTAMP('2020-08-26 13:49:00', TIME(00:10:10))` = TIMESTAMP('2020-08-26" + + " 13:49:00', TIME('00:10:10')), ts = TIMESTAMP('2009-12-12" + + " 13:40:04.123456789')| fields `TIMESTAMP('2020-08-26 13:49:00')`," + + " `TIMESTAMP(DATE('2020-08-26 13:49:00'))`, `TIMESTAMP(TIMESTAMP('2020-08-26" + + " 13:49:00'))`, `TIMESTAMP(TIME('2020-08-26 13:49:00'))`," + + " `TIMESTAMP('2020-08-26 13:49:00', 2020-08-26 00:10:10)`," + + " `TIMESTAMP('2020-08-26 13:49:00', TIMESTAMP(2020-08-26 00:10:10))`," + + " `TIMESTAMP('2020-08-26 13:49:00', DATE(2020-08-26 00:10:10))`," + + " `TIMESTAMP('2020-08-26 13:49:00', TIME(00:10:10))`, ts", + TEST_INDEX_STATE_COUNTRY)); + + verifySchema( + actual, + schema("TIMESTAMP('2020-08-26 13:49:00')", "timestamp"), + schema("TIMESTAMP(DATE('2020-08-26 13:49:00'))", "timestamp"), + schema("TIMESTAMP(TIMESTAMP('2020-08-26 13:49:00'))", "timestamp"), + schema("TIMESTAMP(TIME('2020-08-26 13:49:00'))", "timestamp"), + schema("TIMESTAMP('2020-08-26 13:49:00', 2020-08-26 00:10:10)", "timestamp"), + schema("TIMESTAMP('2020-08-26 13:49:00', TIMESTAMP(2020-08-26 00:10:10))", "timestamp"), + schema("TIMESTAMP('2020-08-26 13:49:00', DATE(2020-08-26 00:10:10))", "timestamp"), + schema("TIMESTAMP('2020-08-26 13:49:00', TIME(00:10:10))", "timestamp"), + schema("ts", "timestamp")); + + verifyDataRows( + actual, + rows( + "2020-08-26 13:49:00", + "2020-08-26 00:00:00", + "2020-08-26 13:49:00", + getFormattedLocalDate() + " 13:49:00", + "2020-08-26 13:59:10", + "2020-08-26 13:59:10", + "2020-08-26 13:49:00", + "2020-08-26 13:59:10", + "2009-12-12 13:40:04.123456789")); + } + + @Test + public void testTime() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval `TIME('2020-08-26 13:49:00')` = TIME('2020-08-26 13:49:00')| eval" + + " `TIME('2020-08-26 13:49')` = 
TIME('2020-08-26 13:49')| eval `TIME('13:49')`" + + " = TIME('13:49')| eval `TIME('13:49:00.123')` = TIME('13:49:00.123')| eval" + + " `TIME(TIME('13:49:00'))` = TIME(TIME('13:49:00'))| eval" + + " `TIME(TIMESTAMP('2024-08-06 13:49:00'))` = TIME(TIMESTAMP('2024-08-06" + + " 13:49:00'))| eval `TIME(DATE('2024-08-06 13:49:00'))` =" + + " TIME(DATE('2024-08-06 13:49:00')), t = TIME('13:49:00.123456789')" + + " | fields `TIME('2020-08-26 13:49:00')`," + + " `TIME('2020-08-26 13:49')`, `TIME('13:49')`, `TIME('13:49:00.123')`," + + " `TIME(TIME('13:49:00'))`, `TIME(TIMESTAMP('2024-08-06 13:49:00'))`," + + " `TIME(DATE('2024-08-06 13:49:00'))`, t | head 1", + TEST_INDEX_STATE_COUNTRY)); + + verifySchema( + actual, + schema("TIME('2020-08-26 13:49:00')", "time"), + schema("TIME('2020-08-26 13:49')", "time"), + schema("TIME('13:49')", "time"), + schema("TIME('13:49:00.123')", "time"), + schema("TIME(TIME('13:49:00'))", "time"), + schema("TIME(TIMESTAMP('2024-08-06 13:49:00'))", "time"), + schema("TIME(DATE('2024-08-06 13:49:00'))", "time"), + schema("t", "time")); + + verifyDataRows( + actual, + rows( + "13:49:00", + "13:49:00", + "13:49:00", + "13:49:00.123", + "13:49:00", + "13:49:00", + "00:00:00", + "13:49:00.123456789")); + } + + @Test + public void testDateSubAndCount() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where strict_date_optional_time > DATE_SUB(TIMESTAMP('1999-04-12" + + " 20:07:00'), INTERVAL 12 HOUR) | stats COUNT() AS CNT ", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("CNT", "long")); + + // relative ones + verifyDataRows(actual, rows(7)); + } + + @Test + public void testTimeStrToDate() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where YEAR(strict_date_optional_time) < 2000| eval demo =" + + " str_to_date(\"01,5,2013\", \"%%d,%%m,%%Y\")| where" + + " str_to_date(\"01,5,2013\", \"%%d,%%m,%%Y\")='2013-05-01 00:00:00'| eval s2d" + + " = STR_TO_DATE('2010-09-10 12:56:45.123456', '%%Y-%%m-%%d %%T.%%f')| fields" + + " demo, s2d | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("demo", "timestamp"), schema("s2d", "timestamp")); + verifyDataRows(actual, rows("2013-05-01 00:00:00", "2010-09-10 12:56:45")); + } + + @Test + public void testTimeFormat() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where YEAR(strict_date_optional_time) < 2000| eval" + + " timestamp=TIME_FORMAT(strict_date_optional_time, '%%h') | eval" + + " time=TIME_FORMAT(time, '%%h')| eval date=TIME_FORMAT(date, '%%h')| eval" + + " string_value=TIME_FORMAT('1998-01-31 13:14:15.012345','%%h %%i %%f' ) |" + + " where TIME_FORMAT(strict_date_optional_time, '%%h')='09'| fields timestamp," + + " time, date, string_value | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("timestamp", "string"), + schema("time", "string"), + schema("date", "string"), + schema("string_value", "string")); + verifyDataRows(actual, rows("09", "09", "12", "01 14 012345")); + } + + @Test + public void testTimeToSec() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| where YEAR(strict_date_optional_time) < 2000" + + "| eval timestamp=TIME_TO_SEC(strict_date_optional_time) " + + "| eval time=TIME_TO_SEC(time)" + + "| eval date=TIME_TO_SEC(date)" + + "| eval long_value=TIME_TO_SEC('22:23:00') " + + "| where TIME_TO_SEC('22:23:00')=80580" + + "| fields timestamp, time, date, long_value | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("timestamp", "long"), + 
schema("time", "long"), + schema("date", "long"), + schema("long_value", "long")); + verifyDataRows(actual, rows(32862, 32862, 0, 80580)); + } + + @Test + public void testSecToTime() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| where YEAR(strict_date_optional_time) < 2000" + + "| eval long_value=SEC_TO_TIME(3601) " + + "| eval double_value=SEC_TO_TIME(1234.123) " + + "| fields long_value, double_value | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("long_value", "time"), schema("double_value", "time")); + verifyDataRows(actual, rows("01:00:01", "00:20:34.123")); + } + + @Test + public void testToSeconds() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| where YEAR(strict_date_optional_time) < 2000" + + "| eval timestamp=to_seconds(strict_date_optional_time) " + + "| eval date=to_seconds(date)" + + "| eval string_value=to_seconds('2008-10-07')" + + "| eval long_value = to_seconds(950228)" + + "| where to_seconds(strict_date_optional_time) > 62617795199" + + "| fields timestamp, date, string_value, long_value | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("timestamp", "long"), + schema("date", "long"), + schema("string_value", "long"), + schema("long_value", "long")); + verifyDataRows(actual, rows(62617828062L, 62617795200L, 63390556800L, 62961148800L)); + } + + @Test + public void testToDays() { + ZonedDateTime utcNow = ZonedDateTime.now(ZoneOffset.UTC); + LocalDate utcDate = utcNow.toLocalDate(); + + // Reference date: year 0 + LocalDate baseDate = LocalDate.of(0, 1, 1); + + // Calculate days since year 0 + long daysSinceYearZero = ChronoUnit.DAYS.between(baseDate, utcDate); + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| where YEAR(strict_date_optional_time) < 2000" + + "| eval timestamp=to_days(strict_date_optional_time) " + + "| eval date=to_days(date)" + + "| eval string_value=to_days('2008-10-07')" + + "| where to_days(strict_date_optional_time) = 724743" + + "| fields timestamp, date, string_value | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("timestamp", "long"), + schema("date", "long"), + schema("string_value", "long")); + verifyDataRows(actual, rows(724743, 724743, 733687)); + } + + @Test + public void testUnixTimeStampTwoArgument() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval from_unix = FROM_UNIXTIME(1220249547, '%%T')" + + "| fields from_unix | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("from_unix", "string")); + verifyDataRows(actual, rows("06:12:27")); + } + + @Test + public void testUnixTimeStampAndFromUnixTime() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval from_unix = from_unixtime(1220249547)" + + "| eval to_unix = unix_timestamp(from_unix)" + // + "| where unix_timestamp(from_unixtime(1700000001)) > 1700000000 " // don't + // do + // filter + + "| fields from_unix, to_unix | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("from_unix", "timestamp"), schema("to_unix", "double")); + verifyDataRows(actual, rows("2008-09-01 06:12:27", 1220249547.0)); + } + + @Test + public void testUtcTimes() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval timestamp=UTC_TIMESTAMP() " + + "| eval time=UTC_TIME()" + + "| eval date=UTC_DATE()" + + "| fields timestamp, time, date ", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, schema("timestamp", "timestamp"), 
schema("date", "date"), schema("time", "time")); + } + + @Test + public void testWeekAndWeekOfYear() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | fields strict_date_optional_time| where" + + " YEAR(strict_date_optional_time) < 2000| eval" + + " `WEEK(DATE(strict_date_optional_time))` =" + + " WEEK(DATE(strict_date_optional_time))| eval" + + " `WEEK_OF_YEAR(DATE(strict_date_optional_time))` =" + + " WEEK_OF_YEAR(DATE(strict_date_optional_time))| eval" + + " `WEEK(DATE(strict_date_optional_time), 1)` =" + + " WEEK(DATE(strict_date_optional_time), 1)| eval" + + " `WEEK_OF_YEAR(DATE(strict_date_optional_time), 1)` =" + + " WEEK_OF_YEAR(DATE(strict_date_optional_time), 1)| eval" + + " `WEEK(DATE('2008-02-20'))` = WEEK(DATE('2008-02-20'))," + + " `WEEK(DATE('2008-02-20'), 1)` = WEEK(DATE('2008-02-20'), 1)| fields" + + " `WEEK(DATE(strict_date_optional_time))`," + + " `WEEK_OF_YEAR(DATE(strict_date_optional_time))`," + + " `WEEK(DATE(strict_date_optional_time), 1)`," + + " `WEEK_OF_YEAR(DATE(strict_date_optional_time), 1)`," + + " `WEEK(DATE('2008-02-20'))`, `WEEK(DATE('2008-02-20'), 1)`| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("WEEK(DATE(strict_date_optional_time))", "integer"), + schema("WEEK_OF_YEAR(DATE(strict_date_optional_time))", "integer"), + schema("WEEK(DATE(strict_date_optional_time), 1)", "integer"), + schema("WEEK_OF_YEAR(DATE(strict_date_optional_time), 1)", "integer"), + schema("WEEK(DATE('2008-02-20'))", "integer"), + schema("WEEK(DATE('2008-02-20'), 1)", "integer")); + + verifyDataRows(actual, rows(15, 15, 15, 15, 7, 8)); + } + + @Test + public void testWeekAndWeekOfYearWithFilter() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | fields strict_date_optional_time" + + "| where YEAR(strict_date_optional_time) < 2000" + + "| where WEEK(DATE(strict_date_optional_time)) = 15" + + "| stats COUNT() AS CNT " + + "| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema(actual, schema("CNT", "long")); + + verifyDataRows(actual, rows(2)); + } + + @Test + public void testWeekDay() { + int currentWeekDay = + formatNow(new FunctionProperties().getQueryStartClock()).getDayOfWeek().getValue() - 1; + JSONObject actual = + executeQuery( + String.format( + "source=%s | where YEAR(strict_date_optional_time) < 2000| eval" + + " timestamp=weekday(TIMESTAMP(strict_date_optional_time))," + + " time=weekday(TIME(strict_date_optional_time))," + + " date=weekday(DATE(strict_date_optional_time))| eval `weekday('2020-08-26')`" + + " = weekday('2020-08-26') | fields timestamp, time, date," + + " `weekday('2020-08-26')`| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("timestamp", "integer"), + schema("time", "integer"), + schema("date", "integer"), + schema("weekday('2020-08-26')", "integer")); + + verifyDataRows(actual, rows(3, currentWeekDay, 3, 2)); + } + + @Test + public void testYearWeek() { + int currentYearWeek = + exprYearweek( + new ExprDateValue( + LocalDateTime.now(new FunctionProperties().getQueryStartClock()).toLocalDate()), + new ExprIntegerValue(0)) + .integerValue(); + JSONObject actual = + executeQuery( + String.format( + "source=%s | where YEAR(strict_date_optional_time) < 2000| eval" + + " timestamp=YEARWEEK(TIMESTAMP(strict_date_optional_time))," + + " date=YEARWEEK(DATE(strict_date_optional_time))| eval" + + " `YEARWEEK('2020-08-26')` = YEARWEEK('2020-08-26') | eval" + + " `YEARWEEK('2019-01-05', 1)` = YEARWEEK('2019-01-05', 1) | eval" + + " time=YEARWEEK(time) | fields 
timestamp, time, date," + + " `YEARWEEK('2020-08-26')`, `YEARWEEK('2019-01-05', 1)`| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("timestamp", "integer"), + schema("time", "integer"), + schema("date", "integer"), + schema("YEARWEEK('2020-08-26')", "integer"), + schema("YEARWEEK('2019-01-05', 1)", "integer")); + + verifyDataRows(actual, rows(198415, currentYearWeek, 198415, 202034, 201901)); + } + + @Test + public void testYearWeekWithFilter() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| where YEARWEEK(strict_date_optional_time) < 200000" + + "| stats COUNT() AS CNT" + + "| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema(actual, schema("CNT", "long")); + + verifyDataRows(actual, rows(2)); + } + + @Test + public void testYear() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where YEAR(strict_date_optional_time) = 1984 | eval" + + " timestamp=YEAR(TIMESTAMP(strict_date_optional_time))," + + " date=YEAR(DATE(strict_date_optional_time))| eval `YEAR('2020-08-26')` =" + + " YEAR('2020-08-26') | fields timestamp, date, `YEAR('2020-08-26')`| head 1 ", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("timestamp", "integer"), + schema("date", "integer"), + schema("YEAR('2020-08-26')", "integer")); + + verifyDataRows(actual, rows(1984, 1984, 2020)); + } + + private void initRelativeDocs() throws IOException { + List relativeList = List.of("NOW", "TMR", "+month", "-2wk", "-1d@d"); + int index = 0; + for (String time : relativeList) { + Request request = + new Request( + "PUT", + "/opensearch-sql_test_index_date_formats/_doc/%s?refresh=true".formatted(index)); + request.setJsonEntity( + "{\"strict_date_optional_time\":\"%s\"}".formatted(convertTimeExpression(time))); + + index++; + client().performRequest(request); + } + } + + private String convertTimeExpression(String expression) { + ZonedDateTime now = ZonedDateTime.now(ZoneId.of("UTC")); + ZonedDateTime result = now; + + switch (expression) { + case "NOW": + break; + case "TMR": // Tomorrow + result = now.plusDays(1).truncatedTo(ChronoUnit.DAYS); + break; + case "+month": // In one month + result = now.plusMonths(1); + break; + case "-2wk": // Two weeks ago + result = now.minusWeeks(2); + break; + case "-1d@d": // Yesterday + result = now.minusDays(1).truncatedTo(ChronoUnit.DAYS); + break; + default: + throw new IllegalArgumentException("Unknown time expression: " + expression); + } + + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); + return result.format(formatter); + } + + @Test + public void testAddDateAndSubDateWithConditionsAndRename() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval lower = SUBDATE(strict_date_optional_time_nanos, 3)," + + " upper = ADDDATE(date, 1), ts = ADDDATE(date, INTERVAL 1 DAY) | where" + + " strict_date < upper | rename strict_date as d | fields lower, upper, d, ts", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("lower", "timestamp"), + schema("upper", "date"), + schema("d", "date"), + schema("ts", "timestamp")); + verifyDataRows( + actual, + rows("1984-04-09 09:07:42.000123456", "1984-04-13", "1984-04-12", "1984-04-13 00:00:00")); + } + + @Test + public void testDateAddAndSub() { + String expectedDate = getFormattedLocalDate(); + + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval t1 = DATE_ADD(strict_date_optional_time, INTERVAL 1 HOUR) " + + "| eval t2 = 
DATE_ADD(strict_date_optional_time, INTERVAL 1 DAY) " + + "| eval t3 = DATE_ADD(strict_date, INTERVAL 1 HOUR) " + + "| eval t4 = DATE_ADD('2020-08-26 01:01:01', INTERVAL 1 DAY) " + + "| eval t5 = DATE_ADD(time, INTERVAL 1 HOUR) " + + "| eval t6 = DATE_ADD(time, INTERVAL 5 HOUR) " + + "| eval t7 = DATE_ADD(strict_date, INTERVAL 2 YEAR)" + + "| eval t8 = DATE_ADD(DATE('2020-01-30'), INTERVAL 1 MONTH)" // edge case + + "| eval t9 = DATE_ADD(DATE('2020-11-30'), INTERVAL 1 QUARTER)" // rare case + + "| eval t10 = DATE_SUB(date, INTERVAL 31 DAY)" + + "| eval t11 = DATE_SUB(basic_date_time, INTERVAL 1 HOUR)" + + "| fields t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11 " + + "| head 1", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("t1", "timestamp"), + schema("t2", "timestamp"), + schema("t3", "timestamp"), + schema("t4", "timestamp"), + schema("t5", "timestamp"), + schema("t6", "timestamp"), + schema("t7", "timestamp"), + schema("t8", "timestamp"), + schema("t9", "timestamp"), + schema("t10", "timestamp"), + schema("t11", "timestamp")); + + verifyDataRows( + actual, + rows( + "1984-04-12 10:07:42", + "1984-04-13 09:07:42", + "1984-04-12 01:00:00", + "2020-08-27 01:01:01", + expectedDate + " 10:07:42", + expectedDate + " 14:07:42", + "1986-04-12 00:00:00", + "2020-02-29 00:00:00", + "2021-02-28 00:00:00", + "1984-03-12 00:00:00", + "1984-04-12 08:07:42")); + } + + @Test + public void testDateAddWithComparisonAndConditions() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where date > DATE('1984-04-11') | eval tomorrow = DATE_ADD(date," + + " INTERVAL 1 DAY) | fields date, tomorrow", + TEST_INDEX_DATE_FORMATS)); + + verifySchema(actual, schema("date", "date"), schema("tomorrow", "timestamp")); + verifyDataRows( + actual, + rows("1984-04-12", "1984-04-13 00:00:00"), + rows("1984-04-12", "1984-04-13 00:00:00")); + } + + @org.junit.Test + public void nullDateTimeInvalidDateValueFebruary() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "timestamp")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void testComparisonBetweenDateAndTimestamp() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where date > TIMESTAMP('1984-04-11 00:00:00') | stats COUNT() AS cnt", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("cnt", "long")); + verifyDataRows(actual, rows(2)); + } + + @Test + public void testAddSubTime() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval t1 = ADDTIME(date, date) " + + "| eval t2 = ADDTIME(time, date) " + + "| eval t3 = SUBTIME(date, time)" + + "| eval t4 = ADDTIME(time, time)" + + "| eval t5 = SUBTIME(date_time, date_time)" + + "| eval t6 = SUBTIME(strict_date_optional_time_nanos, date_time)" + + "| fields t1, t2, t3, t4, t5, t6", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("t1", "timestamp"), + schema("t2", "time"), + schema("t3", "timestamp"), + schema("t4", "time"), + schema("t5", "timestamp"), + schema("t6", "timestamp")); + verifyDataRows( + actual, + rows( + "1984-04-12 00:00:00", + "09:07:42", + "1984-04-11 14:52:18", + "18:15:24", + "1984-04-12 00:00:00", + "1984-04-12 00:00:00.000123456")); + } + + /** HOUR, HOUR_OF_DAY, DATE */ + @Test + public void testHourAndDateWithConditions() { + JSONObject actual = + 
executeQuery( + String.format( + "source=%s | where incomplete_1 > DATE('2000-10-01') | eval t1 = HOUR(date_time)," + + " t2 = HOUR_OF_DAY(time), t3 = HOUR('23:14:00'), t4 = HOUR('2023-12-31" + + " 16:03:00') | head 1 | fields t1, t2, t3, t4", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("t1", "integer"), + schema("t2", "integer"), + schema("t3", "integer"), + schema("t4", "integer")); + verifyDataRows(actual, rows(9, 9, 23, 16)); + } + + /** MONTH, MONTH_OF_YEAR */ + @Test + public void testMonth() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where MONTH(date) > MONTH('2003-03-10') | head 1 |eval m1 =" + + " MONTH(date), m2 = MONTH_OF_YEAR(date_time), m3 = MONTH('2023-01-12" + + " 10:11:12') | fields m1, m2, m3", + TEST_INDEX_DATE_FORMATS)); + verifySchema(actual, schema("m1", "integer"), schema("m2", "integer"), schema("m3", "integer")); + verifyDataRows(actual, rows(4, 4, 1)); + } + + /** + * CURDATE, CURTIME, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, NOW, LOCALTIMESTAMP, LOCALTIME + */ + @Test + public void testCurrentDateTimeWithComparison() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval cd = CURDATE(), ct = CURTIME(), cdt = CURRENT_DATE(), ctm =" + + " CURRENT_TIME(), cts = CURRENT_TIMESTAMP(), now = NOW(), lt = LOCALTIME()," + + " lts = LOCALTIMESTAMP() | where lt = lts and lts = now | where now >= cd and" + + " now = cts | fields cd, ct, cdt, ctm, cts, now, lt, lts", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("cd", "date"), + schema("ct", "time"), + schema("cdt", "date"), + schema("ctm", "time"), + schema("cts", "timestamp"), + schema("now", "timestamp"), + schema("lt", "timestamp"), + schema("lts", "timestamp")); + + // Should return all rows in the index + verifyNumOfRows(actual, 7); + } + + @Test + public void testSysdate() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = SYSDATE(), d2 = SYSDATE(3), d3 = SYSDATE(6)|eval" + + " df1 = DATE_FORMAT(d1, '%%Y-%%m-%%d %%T.%%f'), df2 = DATE_FORMAT(d2," + + " '%%Y-%%m-%%d %%T.%%f'), df3 = DATE_FORMAT(d3, '%%Y-%%m-%%d %%T.%%f') |" + + " fields d1, d2, d3, df1, df2, df3", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("d1", "timestamp"), + schema("d2", "timestamp"), + schema("d3", "timestamp"), + schema("df1", "string"), + schema("df2", "string"), + schema("df3", "string")); + + final String DATETIME_P0_PATTERN = "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}$"; + final String DATETIME_P3_PATTERN = "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{1,3}$"; + final String DATETIME_P6_PATTERN = "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{1,6}$"; + final String DATETIME_P0_FMT_PATTERN = "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.000000$"; + final String DATETIME_P3_FMT_PATTERN = + "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{3}000$"; + final String DATETIME_P6_FMT_PATTERN = "^\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{6}$"; + verify( + actual.getJSONArray("datarows").getJSONArray(0), + Matchers.matchesPattern(DATETIME_P0_PATTERN), + Matchers.matchesPattern(DATETIME_P3_PATTERN), + Matchers.matchesPattern(DATETIME_P6_PATTERN), + Matchers.matchesPattern(DATETIME_P0_FMT_PATTERN), + Matchers.matchesPattern(DATETIME_P3_FMT_PATTERN), + Matchers.matchesPattern(DATETIME_P6_FMT_PATTERN)); + } + + /** + * DAY, DAY_OF_MONTH, DAYOFMONTH, DAY_OF_WEEK, DAYOFWEEK, DAY_OF_YEAR, DAYOFYEAR f.t. 
ADDDATE, + * SUBDATE + */ + @Test + public void testDayOfAndAddSubDateWithConditions() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where DAY(date) > 11 and DAY_OF_YEAR(date) < 104 | where" + + " DAY_OF_WEEK(date) = 5 | eval d1 = DAY_OF_MONTH(ADDDATE(date, 1)), d2 =" + + " DAYOFMONTH(SUBDATE(date, 3)) | eval d3 = DAY_OF_WEEK('1984-04-12'), d4 =" + + " DAYOFWEEK(ADDDATE(date, INTERVAL 1 DAY)),d5 = DAY_OF_YEAR(date_time) | head" + + " 1 | fields d1, d2, d3, d4, d5", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("d1", "integer"), + schema("d2", "integer"), + schema("d3", "integer"), + schema("d4", "integer"), + schema("d5", "integer")); + verifyDataRows(actual, rows(13, 9, 5, 6, 103)); + } + + @Test + public void testDayName() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = DAYNAME(date), d2 = DAYNAME('1984-04-12'), d3 =" + + " DAYNAME(date_time),m1 = MONTHNAME(date), m2 = MONTHNAME('1984-04-12" + + " 10:07:42')" + + "| fields d1, d2, d3, m1, m2", + TEST_INDEX_DATE_FORMATS)); + } + + /** + * DAYNAME, MONTHNAME, LAST_DAY, MAKEDATE + * + *
<p>
DAYNAME(STRING/DATE/TIMESTAMP) -> STRING MONTHNAME(STRING/DATE/TIMESTAMP) -> STRING + * LAST_DAY(DATE/STRING/TIMESTAMP/TIME) -> DATE (last day of the month as a DATE for a valid + * argument.) MAKE_DATE(DOUBLE, DOUBLE) -> DATE (Create a date from the year and day of year.) + */ + @Test + public void testDayNameAndMonthNameAndMakeDate() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = DAYNAME(date), d2 = DAYNAME('1984-04-12'), d3 =" + + " DAYNAME(date_time),m1 = MONTHNAME(date), m2 = MONTHNAME('1984-04-12" + + " 10:07:42'),ld1 = LAST_DAY(date), ld2 = LAST_DAY('1984-04-12'), ld3 =" + + " LAST_DAY('1984-04-12 10:07:42'),md1 = MAKEDATE(2020, 1), md2 =" + + " MAKEDATE(2020, 366), md3 = MAKEDATE(2020, 367) | eval m3 = MONTHNAME(md2)," + + " ld4 = LAST_DAY(md3)| fields d1, d2, d3, m1, m2, m3, ld1, ld2, ld3, ld4," + + " md1, md2, md3", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("d1", "string"), + schema("d2", "string"), + schema("d3", "string"), + schema("m1", "string"), + schema("m2", "string"), + schema("m3", "string"), + schema("ld1", "date"), + schema("ld2", "date"), + schema("ld3", "date"), + schema("ld4", "date"), + schema("md1", "date"), + schema("md2", "date"), + schema("md3", "date")); + + final String thu = DayOfWeek.THURSDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()); + final String apr = Month.APRIL.getDisplayName(TextStyle.FULL, Locale.getDefault()); + final String dec = Month.DECEMBER.getDisplayName(TextStyle.FULL, Locale.getDefault()); + verifyDataRows( + actual, + rows( + thu, + thu, + thu, + apr, + apr, + dec, + "1984-04-30", + "1984-04-30", + "1984-04-30", + "2021-01-31", + "2020-01-01", + "2020-12-31", + "2021-01-01")); + } + + /** + * MAKE_DATE(DOUBLE, DOUBLE) -> DATE (Create a date from the year and day of year.) Returns a + * date, given year and day-of-year values. dayofyear must be greater than 0 or the result is + * NULL. The result is also NULL if either argument is NULL. Arguments are rounded to an integer. + * + *
<p>
Limitations: - Zero year interpreted as 2000; - Negative year is not accepted; - day-of-year + * should be greater than zero; - day-of-year could be greater than 365/366, calculation switches + * to the next year(s) (see example). + */ + @Test + public void testMakeDateWithNullIO() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | where firstname = 'Virginia' | eval md1 = MAKEDATE(2020, 1), md2 =" + + " MAKEDATE(2020, 366), md3 = MAKEDATE(2020, 367),md4 = MAKEDATE(0, 78), md5 =" + + " MAKEDATE(2008, 0), md6 = MAKEDATE(age, 70) | fields md1, md2, md3, md4," + + " md5, md6", + TEST_INDEX_BANK_WITH_NULL_VALUES)); + + verifySchema( + actual, + schema("md1", "date"), + schema("md2", "date"), + schema("md3", "date"), + schema("md4", "date"), + schema("md5", "date"), + schema("md6", "date")); + verifyDataRows( + actual, rows("2020-01-01", "2020-12-31", "2021-01-01", "2000-03-18", null, null)); + } + + /** + * DATE_FORMAT: (STRING/DATE/TIME/TIMESTAMP) -> STRING formats the date argument using the + * specifiers in the format argument FROM_DAYS: (Integer/Long) -> DATE from_days(N) returns the + * date value given the day number N. DATETIME: (TIMESTAMP, STRING) -> TIMESTAMP (TIMESTAMP) -> + * TIMESTAMP Converts the datetime to a new timezone + */ + @Test + public void testDateFormatAndDatetimeAndFromDays() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = DATE_FORMAT(date, '%%Y-%%m-%%d'), d2 =" + + " DATE_FORMAT('1984-04-12', '%%Y-%%b-%%D %%r'),d3 = DATE_FORMAT(date_time," + + " '%%d.%%m.%%y %%l:%%i %%p'), d4 = DATE_FORMAT(time, '%%T'),d5 =" + + " DATE_FORMAT('2020-08-26 13:49:00', '%%a %%c %%e %%H %%h %%j %%k %%M %%S %%s" + + " %%W %%w %%'),d6 = FROM_DAYS(737000), d9 = DATETIME(date_time, '+08:00')," + + " d10 = DATETIME('1984-04-12 09:07:42', '+00:00')| eval d11 = DATE_FORMAT(d9," + + " '%%U %%X %%V'), d12 = DATE_FORMAT(d10, '%%u %%v %%x'), d13 =" + + " DATE_FORMAT(strict_date_time, '%%T.%%f')| fields d1, d2, d3, d4, d5, d6," + + " d9, d10, d11, d12, d13", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("d1", "string"), + schema("d2", "string"), + schema("d3", "string"), + schema("d4", "string"), + schema("d5", "string"), + schema("d6", "date"), + schema("d9", "timestamp"), + schema("d10", "timestamp"), + schema("d11", "string"), + schema("d12", "string"), + schema("d13", "string")); + + Instant expectedInstant = + LocalDateTime.parse("1984-04-12T09:07:42").atZone(ZoneOffset.systemDefault()).toInstant(); + LocalDateTime offsetUTC = LocalDateTime.ofInstant(expectedInstant, ZoneOffset.UTC); + LocalDateTime offsetPlus8 = LocalDateTime.ofInstant(expectedInstant, ZoneId.of("+08:00")); + String expectedDatetimeAtUTC = + offsetUTC.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + String expectedDatetimeAtPlus8 = + offsetPlus8.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); + + verifyDataRows( + actual, + rows( + "1984-04-12", + "1984-Apr-12th 12:00:00 AM", + "12.04.84 9:07 AM", + "09:07:42", + "Wed 08 26 13 01 239 13 August 00 00 Wednesday 3 %", + "2017-11-02", + expectedDatetimeAtPlus8, + expectedDatetimeAtUTC, + "15 1984 15", + "15 15 1984", + "09:07:42.000123")); + } + + @Test + public void testDateDiffAndMakeTime() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = DATEDIFF(date, ADDDATE(date, INTERVAL 1 DAY)), " + + "d2 = DATEDIFF(date, SUBDATE(date, INTERVAL 50 DAY)), " + + "d3 = DATEDIFF(date, TIME('20:59')), " + + "d4 = DATEDIFF(date_time, SUBDATE(date, 
1024)), " + + "d5 = DATEDIFF(date_time, TIMESTAMP('2020-08-26 13:49:00')), " + + "d6 = DATEDIFF(date_time, time), " + + "d7 = DATEDIFF(MAKETIME(20, 30, 40), date)," + + "d8 = DATEDIFF(time, date_time), " + + "d9 = DATEDIFF(TIME('13:20:00'), time)," + + "t = MAKETIME(20.2, 49.5, 42.100502)" + + "| fields d1, d2, d3, d4, d5, d6, d7, d8, d9, t", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("d1", "long"), + schema("d2", "long"), + schema("d3", "long"), + schema("d4", "long"), + schema("d5", "long"), + schema("d6", "long"), + schema("d7", "long"), + schema("d8", "long"), + schema("d9", "long"), + schema("t", "time")); + + LocalDate today = LocalDate.now(ZoneId.systemDefault()); + long dateDiffWithToday = ChronoUnit.DAYS.between(LocalDate.parse("1984-04-12"), today); + verifyDataRows( + actual, + rows( + -1, + 50, + -dateDiffWithToday, + 1024, + -13285, + -dateDiffWithToday, + dateDiffWithToday, + dateDiffWithToday, + 0, + "20:50:42.100502")); + } + + @Test + public void testTimestampDiffAndTimestampAdd() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval d1 = TIMESTAMPDIFF(DAY, SUBDATE(date_time, INTERVAL 1" + + " DAY), date), d2 = TIMESTAMPDIFF(HOUR, date_time, TIMESTAMPADD(DAY, 1," + + " date_time)), d3 = TIMESTAMPDIFF(MINUTE, date, date_time), d4 =" + + " TIMESTAMPDIFF(SECOND, date_time, ADDDATE(date_time, INTERVAL 1 HOUR)), d5 =" + + " TIMESTAMPDIFF(MINUTE, time, '12:30:00'), d6 = TIMESTAMPDIFF(WEEK," + + " '1999-12-31 00:00:00', TIMESTAMPADD(HOUR, -24, date_time)), d7 =" + + " TIMESTAMPDIFF(MONTH, TIMESTAMPADD(YEAR, 5, '1994-12-10 13:49:02')," + + " ADDDATE(date_time, 1)), d8 = TIMESTAMPDIFF(QUARTER, MAKEDATE(2008, 153)," + + " date), d9 = TIMESTAMPDIFF(YEAR, date, '2013-06-19 00:00:00'), t =" + + " TIMESTAMPADD(MICROSECOND, 1, date_time) | eval d10 =" + + " TIMESTAMPDIFF(MICROSECOND, t, date_time) | fields d1, d2, d3, d4, d5, d6," + + " d7, d8, d9, t, d10", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("d1", "long"), + schema("d2", "long"), + schema("d3", "long"), + schema("d4", "long"), + schema("d5", "long"), + schema("d6", "long"), + schema("d7", "long"), + schema("d8", "long"), + schema("d9", "long"), + schema("t", "timestamp"), + schema("d10", "long")); + + verifyDataRows( + actual, rows(0, 24, 547, 3600, 202, -820, -187, -96, 29, "1984-04-12 09:07:42.000001", -1)); + } + + @Test + public void testPeriodAddAndPeriodDiff() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval p1 = PERIOD_ADD(200801, 3), " + + "p2 = PERIOD_ADD(199307, -13), " + + "p3 = PERIOD_DIFF(200802, 200703), " + + "p4 = PERIOD_DIFF(200802, 201003) " + + "| fields p1, p2, p3, p4", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("p1", "integer"), + schema("p2", "integer"), + schema("p3", "integer"), + schema("p4", "integer")); + + verifyDataRows(actual, rows(200804, 199206, 11, -25)); + } + + @Test + public void testMinuteOfHourAndMinuteOfDay() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval m1 = MINUTE_OF_HOUR(date_time), " + + "m2 = MINUTE(time), " + + "m3 = MINUTE_OF_DAY(strict_date_time), " + + "m4 = MINUTE_OF_DAY(time), " + + "m5 = MINUTE('2009-10-19 23:40:27.123456'), " + + "m6 = MINUTE_OF_HOUR('16:20:39') " + + "| fields m1, m2, m3, m4, m5, m6", + TEST_INDEX_DATE_FORMATS)); + + verifySchema( + actual, + schema("m1", "integer"), + schema("m2", "integer"), + schema("m3", "integer"), + schema("m4", "integer"), + schema("m5", "integer"), + schema("m6", 
"integer")); + + verifyDataRows(actual, rows(7, 7, 547, 547, 40, 20)); + } + + @Test + public void testTimeDiff() { + JSONObject actual = + executeQuery( + String.format( + "source = %s | head 1 | eval t1 = TIMEDIFF('23:59:59', '13:00:00')," + + "t2 = TIMEDIFF(time, '13:00:00')," + + "t3 = TIMEDIFF(time, time) " + + "| fields t1, t2, t3", + TEST_INDEX_DATE_FORMATS)); + + verifySchema(actual, schema("t1", "time"), schema("t2", "time"), schema("t3", "time")); + + verifyDataRows(actual, rows("10:59:59", "20:07:42", "00:00:00")); + } + + @Test + public void testQuarter() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval `QUARTER(DATE('2020-08-26'))` = QUARTER(DATE('2020-08-26')) " + + "| eval quarter2 = QUARTER(basic_date) " + + "| eval timestampQuarter2 = QUARTER(basic_date_time) " + + "| fields `QUARTER(DATE('2020-08-26'))`, quarter2, timestampQuarter2 " + + "| head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("QUARTER(DATE('2020-08-26'))", "integer"), + schema("quarter2", "integer"), + schema("timestampQuarter2", "integer")); + verifyDataRows(actual, rows(3, 2, 2)); + } + + @Test + public void testSecond() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval s = SECOND(TIMESTAMP('01:02:03')) " + + "| eval secondForTime = SECOND(basic_time) " + + "| eval secondForDate = SECOND(basic_date) " + + "| eval secondForTimestamp = SECOND(strict_date_optional_time_nanos) " + + "| fields s, secondForTime, secondForDate, secondForTimestamp " + + "| head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("s", "integer"), + schema("secondForTime", "integer"), + schema("secondForDate", "integer"), + schema("secondForTimestamp", "integer")); + verifyDataRows(actual, rows(3, 42, 0, 42)); + } + + @Test + public void testSecondOfMinute() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval s = second_of_minute(TIMESTAMP('01:02:03')) | eval secondForTime" + + " = second_of_minute(basic_time) | eval secondForDate =" + + " second_of_minute(basic_date) | eval secondForTimestamp =" + + " second_of_minute(strict_date_optional_time_nanos) | fields s," + + " secondForTime, secondForDate, secondForTimestamp | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("s", "integer"), + schema("secondForTime", "integer"), + schema("secondForDate", "integer"), + schema("secondForTimestamp", "integer")); + verifyDataRows(actual, rows(3, 42, 0, 42)); + } + + @Test + public void testConvertTz() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval r1 = convert_tz('2008-05-15 12:00:00', '+00:00'," + + " '+10:00') | eval r2 = convert_tz(TIMESTAMP('2008-05-15 12:00:00')," + + " '+00:00', '+10:00') | eval r3 = convert_tz(date_time, '+00:00', '+10:00') |" + + " eval r4 = convert_tz('2008-05-15 12:00:00', '-00:00', '+00:00') | eval r5 =" + + " convert_tz('2008-05-15 12:00:00', '+10:00', '+11:00') | eval r6 =" + + " convert_tz('2021-05-12 11:34:50', '-08:00', '+09:00') | eval r7 =" + + " convert_tz('2021-05-12 11:34:50', '-12:00', '+12:00') | eval r8 =" + + " convert_tz('2021-05-12 13:00:00', '+09:30', '+05:45') | eval r9 =" + + " convert_tz(strict_date_time, '+09:00', '+05:00')| fields r1, r2, r3, r4," + + " r5, r6, r7, r8, r9", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("r1", "timestamp"), + schema("r2", "timestamp"), + schema("r3", "timestamp"), + schema("r4", "timestamp"), + schema("r5", "timestamp"), + schema("r6", "timestamp"), + 
schema("r7", "timestamp"), + schema("r8", "timestamp"), + schema("r9", "timestamp")); + verifyDataRows( + actual, + rows( + "2008-05-15 22:00:00", + "2008-05-15 22:00:00", + "1984-04-12 19:07:42", + "2008-05-15 12:00:00", + "2008-05-15 13:00:00", + "2021-05-13 04:34:50", + "2021-05-13 11:34:50", + "2021-05-12 09:15:00", + null)); + } + + @Test + public void testConvertTzWithInvalidResult() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval r1 = convert_tz('2021-05-30 11:34:50', '-17:00', '+08:00') " + + "| eval r2 = convert_tz('2021-05-12 11:34:50', '-12:00', '+15:00') " + + "| eval r3 = convert_tz('2021-05-12 11:34:50', '-12:00', 'test') " + + "| fields r1, r2, r3" + + "| head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, schema("r1", "timestamp"), schema("r2", "timestamp"), schema("r3", "timestamp")); + verifyDataRows(actual, rows(null, null, null)); + } + + @Test + public void testGetFormat() { + JSONObject actual = + executeQuery( + String.format( + "source=%s " + + "| eval r1 = GET_FORMAT(DATE, 'USA') " + + "| eval r2 = GET_FORMAT(TIME, 'INTERNAL') " + + "| eval r3 = GET_FORMAT(TIMESTAMP, 'EUR') " + + "| eval r4 = GET_FORMAT(TIMESTAMP, 'UTC') " + + "| fields r1, r2, r3, r4" + + "| head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("r1", "string"), + schema("r2", "string"), + schema("r3", "string"), + schema("r4", "string")); + verifyDataRows(actual, rows("%m.%d.%Y", "%H%i%s", "%Y-%m-%d %H.%i.%s", null)); + } + + @Test + public void testExtractWithSimpleFormats() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval r1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval r2 =" + + " extract(YEAR FROM strict_date_optional_time_nanos) | eval r3 = extract(year" + + " FROM basic_date) | eval r4 = extract(QUARTER FROM" + + " strict_date_optional_time_nanos) | eval r5 = extract(quarter FROM" + + " basic_date) | eval r6 = extract(MONTH FROM strict_date_optional_time_nanos)" + + " | eval r7 = extract(month FROM basic_date) | eval r8 = extract(WEEK FROM" + + " strict_date_optional_time_nanos) | eval r9 = extract(week FROM basic_date)" + + " | eval r10 = extract(DAY FROM strict_date_optional_time_nanos) | eval r11 =" + + " extract(day FROM basic_date) | eval r12 = extract(HOUR FROM" + + " strict_date_optional_time_nanos) | eval r13 = extract(hour FROM basic_time)" + + " | eval r14 = extract(MINUTE FROM strict_date_optional_time_nanos) | eval" + + " r15 = extract(minute FROM basic_time) | eval r16 = extract(SECOND FROM" + + " strict_date_optional_time_nanos) | eval r17 = extract(second FROM" + + " basic_time) | eval r19 =" + + " extract(day FROM '1984-04-12') | eval r20 = extract(MICROSECOND FROM" + + " timestamp('1984-04-12 09:07:42.123456789')) | fields r1, r2, r3, r4, r5," + + " r6, r7, r8, r9, r10, r11, r12, r13, r14, r15, r16, r17, r19, r20 |" + + " head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("r1", "long"), + schema("r2", "long"), + schema("r3", "long"), + schema("r4", "long"), + schema("r5", "long"), + schema("r6", "long"), + schema("r7", "long"), + schema("r8", "long"), + schema("r9", "long"), + schema("r10", "long"), + schema("r11", "long"), + schema("r12", "long"), + schema("r13", "long"), + schema("r14", "long"), + schema("r15", "long"), + schema("r16", "long"), + schema("r17", "long"), + schema("r19", "long"), + schema("r20", "long")); + verifyDataRows( + actual, rows(1997, 1984, 1984, 2, 2, 4, 4, 15, 15, 12, 12, 9, 9, 7, 7, 42, 42, 12, 123456)); + } + + @Test + public 
void testExtractWithComplexFormats() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | eval r1 = extract(YEAR_MONTH FROM '1997-01-01 00:00:00') | eval r2 =" + + " extract(DAY_HOUR FROM strict_date_optional_time_nanos) | eval r3 =" + + " extract(DAY_HOUR FROM basic_date) | eval r4 = extract(DAY_MINUTE FROM" + + " strict_date_optional_time_nanos) | eval r5 = extract(DAY_MINUTE FROM" + + " basic_date) | eval r6 = extract(DAY_SECOND FROM" + + " strict_date_optional_time_nanos) | eval r7 = extract(DAY_SECOND FROM" + + " basic_date) | eval r8 = extract(HOUR_MINUTE FROM" + + " strict_date_optional_time_nanos) | eval r9 = extract(HOUR_MINUTE FROM" + + " basic_time) | eval r10 = extract(HOUR_SECOND FROM" + + " strict_date_optional_time_nanos) | eval r11 = extract(HOUR_SECOND FROM" + + " basic_time) | eval r12 = extract(MINUTE_SECOND FROM" + + " strict_date_optional_time_nanos) | eval r13 = extract(MINUTE_SECOND FROM" + + " basic_time) | eval r14 = extract(DAY_MICROSECOND FROM" + + " strict_date_optional_time_nanos) | eval r15 = extract(HOUR_MICROSECOND FROM" + + " strict_date_optional_time_nanos) | eval r16 = extract(MINUTE_MICROSECOND" + + " FROM strict_date_optional_time_nanos) | eval r17 =" + + " extract(SECOND_MICROSECOND FROM strict_date_optional_time_nanos) | eval r18" + + " = extract(MICROSECOND FROM strict_date_optional_time_nanos) | fields r1," + + " r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, r13, r14, r15, r16, r17, r18" + + " | head 1", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("r1", "long"), + schema("r2", "long"), + schema("r3", "long"), + schema("r4", "long"), + schema("r5", "long"), + schema("r6", "long"), + schema("r7", "long"), + schema("r8", "long"), + schema("r9", "long"), + schema("r10", "long"), + schema("r11", "long"), + schema("r12", "long"), + schema("r13", "long"), + schema("r14", "long"), + schema("r15", "long"), + schema("r16", "long"), + schema("r17", "long"), + schema("r18", "long")); + verifyDataRows( + actual, + rows( + 199701, + 1209, + 1200, + 120907, + 120000, + 12090742, + 12000000, + 907, + 907, + 90742, + 90742, + 742, + 742, + 12090742000123L, + 90742000123L, + 742000123, + 42000123, + 123)); + } + + @Test + public void testMicrosecond() { + JSONObject actual = + executeQuery( + String.format( + "source=%s | head 1 | eval m1 = MICROSECOND(date_time), m2 = MICROSECOND(time), m3" + + " = MICROSECOND(date), m4 = MICROSECOND('13:45:22.123456789'), m5 =" + + " MICROSECOND('2012-09-13 13:45:22.123456789')| fields m1, m2, m3, m4, m5", + TEST_INDEX_DATE_FORMATS)); + verifySchema( + actual, + schema("m1", "integer"), + schema("m2", "integer"), + schema("m3", "integer"), + schema("m4", "integer"), + schema("m5", "integer")); + verifyDataRows(actual, rows(0, 0, 0, 123456, 123456)); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionPushdownIT.java b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionPushdownIT.java new file mode 100644 index 0000000000..97d003e8ff --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/calcite/standalone/CalcitePPLDateTimeBuiltinFunctionPushdownIT.java @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.calcite.standalone; + +import org.opensearch.sql.common.setting.Settings; + +public class CalcitePPLDateTimeBuiltinFunctionPushdownIT extends CalcitePPLBuiltinFunctionIT { + @Override + protected 
Settings getSettings() { + return enablePushdown(); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 3a476fd11d..6cfbaba7f8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -762,6 +762,11 @@ public enum Index { "date_formats", getMappingFile("date_formats_index_mapping.json"), "src/test/resources/date_formats.json"), + DATE_FORMATS_WITH_NULL( + TestsConstants.TEST_INDEX_DATE_FORMATS_WITH_NULL, + "date_formats_null", + getMappingFile("date_formats_index_mapping.json"), + "src/test/resources/date_formats_with_null.json"), WILDCARD( TestsConstants.TEST_INDEX_WILDCARD, "wildcard", diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java index 39c6248129..824b2dd5c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java @@ -54,6 +54,8 @@ public class TestsConstants { public static final String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; public static final String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; public static final String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; + public static final String TEST_INDEX_DATE_FORMATS_WITH_NULL = + TEST_INDEX + "_date_formats_with_null"; public static final String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; public static final String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; public static final String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls"; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java index 2d94dc6a3b..fc684340b1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java @@ -223,7 +223,7 @@ public void testNowLikeFunctions() throws IOException { executeQuery( "source=" + TEST_INDEX_PEOPLE2 - + " | eval " + + "| eval " + calls.stream() .map(c -> String.format("`%s`=%s", c, c)) .collect(Collectors.joining(",")) diff --git a/integ-test/src/test/resources/date_formats_with_null.json b/integ-test/src/test/resources/date_formats_with_null.json new file mode 100644 index 0000000000..f7d88b4d63 --- /dev/null +++ b/integ-test/src/test/resources/date_formats_with_null.json @@ -0,0 +1,2 @@ +{"index": {}} +{"epoch_millis": null, "epoch_second": null, "date_optional_time": null, "strict_date_optional_time": null, "strict_date_optional_time_nanos": null, "basic_date": null, "basic_date_time": null, "basic_date_time_no_millis": null, "basic_ordinal_date": null, "basic_ordinal_date_time": null, "basic_ordinal_date_time_no_millis": null, "basic_time": null, "basic_time_no_millis": null, "basic_t_time": null, "basic_t_time_no_millis": null, "basic_week_date": null, "strict_basic_week_date": null, "basic_week_date_time": null, "strict_basic_week_date_time": null, "basic_week_date_time_no_millis": null, "strict_basic_week_date_time_no_millis": null, "date": null, "strict_date": null, "date_hour": null, "strict_date_hour": null, "date_hour_minute": null, "strict_date_hour_minute": null, "date_hour_minute_second": null, 
"strict_date_hour_minute_second": null, "date_hour_minute_second_fraction": null, "strict_date_hour_minute_second_fraction": null, "date_hour_minute_second_millis": null, "strict_date_hour_minute_second_millis": null, "date_time": null, "strict_date_time": null, "date_time_no_millis": null, "strict_date_time_no_millis": null, "hour": null, "strict_hour": null, "hour_minute": null, "strict_hour_minute": null, "hour_minute_second": null, "strict_hour_minute_second": null, "hour_minute_second_fraction": null, "strict_hour_minute_second_fraction": null, "hour_minute_second_millis": null, "strict_hour_minute_second_millis": null, "ordinal_date": null, "strict_ordinal_date": null, "ordinal_date_time": null, "strict_ordinal_date_time": null, "ordinal_date_time_no_millis": null, "strict_ordinal_date_time_no_millis": null, "time": null, "strict_time": null, "time_no_millis": null, "strict_time_no_millis": null, "t_time": null, "strict_t_time": null, "t_time_no_millis": null, "strict_t_time_no_millis": null, "week_date": null, "strict_week_date": null, "week_date_time": null, "strict_week_date_time": null, "week_date_time_no_millis": null, "strict_week_date_time_no_millis": null, "weekyear_week_day": null, "strict_weekyear_week_day": null, "year_month_day": null, "strict_year_month_day": null, "yyyy-MM-dd": null, "custom_time": null, "yyyy-MM-dd_OR_epoch_millis": null, "hour_minute_second_OR_t_time": null, "custom_timestamp": null, "custom_date_or_date": null, "custom_date_or_custom_time": null, "custom_time_parser_check": null, "incomplete_1": null, "incomplete_2": null, "incomplete_custom_date": null, "incomplete_custom_time": null, "incorrect": null, "epoch_sec": null, "epoch_milli": null, "custom_no_delimiter_date": null, "custom_no_delimiter_time": null, "custom_no_delimiter_ts": null} diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/calcite/CalcitePPLDateTimeFunctionTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/calcite/CalcitePPLDateTimeFunctionTest.java index f8bc5ebd63..0de7f5e669 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/calcite/CalcitePPLDateTimeFunctionTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/calcite/CalcitePPLDateTimeFunctionTest.java @@ -8,6 +8,7 @@ import java.time.LocalDate; import org.apache.calcite.rel.RelNode; import org.apache.calcite.test.CalciteAssert; +import org.junit.Ignore; import org.junit.Test; public class CalcitePPLDateTimeFunctionTest extends CalcitePPLAbstractTest { @@ -16,14 +17,15 @@ public CalcitePPLDateTimeFunctionTest() { super(CalciteAssert.SchemaSpec.SCOTT_WITH_TEMPORAL); } + @Ignore("Ignore since we don't have this data source in real environment") @Test public void testDateAndCurrentTimestamp() { String ppl = "source=EMP | eval added = DATE(CURRENT_TIMESTAMP()) | fields added | head 1"; RelNode root = getRelNode(ppl); String expectedLogical = - "" - + "LogicalSort(fetch=[1])\n" - + " LogicalProject(added=[DATE(CURRENT_TIMESTAMP)])\n" + "LogicalSort(fetch=[1])\n" + + " LogicalProject(added=[POSTPROCESS(DATE(PREPROCESS(POSTPROCESS(CURRENT_TIMESTAMP," + + " FLAG(TIMESTAMP)), FLAG(TIMESTAMP))), FLAG(DATE))])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; verifyLogical(root, expectedLogical); String expectedResult = "added=" + LocalDate.now() + "\n"; @@ -34,6 +36,7 @@ public void testDateAndCurrentTimestamp() { verifyPPLToSparkSQL(root, expectedSparkSql); } + @Ignore("Ignore since we don't have this data source in real environment") @Test public void testCurrentDate() { String ppl = "source=EMP | eval added = 
CURRENT_DATE() | fields added | head 1"; @@ -41,7 +44,7 @@ public void testCurrentDate() { String expectedLogical = "" + "LogicalSort(fetch=[1])\n" - + " LogicalProject(added=[CURRENT_DATE])\n" + + " LogicalProject(added=[POSTPROCESS(CURRENT_DATE, FLAG(DATE))])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; verifyLogical(root, expectedLogical); String expectedResult = "added=" + LocalDate.now() + "\n";