Commit 70c0b19

initial commit

1 parent 7087677 commit 70c0b19
File tree: 6 files changed, +835 −2 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala

Lines changed: 27 additions & 2 deletions
@@ -20,10 +20,11 @@ package org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.QueryContext
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode}
 import org.apache.spark.sql.catalyst.trees.TreePattern.{EXTRACT_VALUE, TreePattern}
 import org.apache.spark.sql.catalyst.util.{quoteIdentifier, ArrayData, GenericArrayData, MapData, TypeUtils}
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryErrorsBase, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 
@@ -90,7 +91,7 @@ object ExtractValue {
   }
 }
 
-trait ExtractValue extends Expression {
+trait ExtractValue extends Expression with QueryErrorsBase {
   override def nullIntolerant: Boolean = true
   final override val nodePatterns: Seq[TreePattern] = Seq(EXTRACT_VALUE)
   val child: Expression
@@ -314,6 +315,30 @@ case class GetArrayItem(
     })
   }
 
+  override def checkInputDataTypes(): TypeCheckResult = {
+    (left.dataType, right.dataType) match {
+      case (_: ArrayType, e2) if !e2.isInstanceOf[IntegralType] =>
+        DataTypeMismatch(
+          errorSubClass = "UNEXPECTED_INPUT_TYPE",
+          messageParameters = Map(
+            "paramIndex" -> ordinalNumber(1),
+            "requiredType" -> toSQLType(IntegralType),
+            "inputSql" -> toSQLExpr(right),
+            "inputType" -> toSQLType(right.dataType))
+        )
+      case (e1, _) if !e1.isInstanceOf[ArrayType] =>
+        DataTypeMismatch(
+          errorSubClass = "UNEXPECTED_INPUT_TYPE",
+          messageParameters = Map(
+            "paramIndex" -> ordinalNumber(0),
+            "requiredType" -> toSQLType(TypeCollection(ArrayType)),
+            "inputSql" -> toSQLExpr(left),
+            "inputType" -> toSQLType(left.dataType))
+        )
+      case _ => TypeCheckResult.TypeCheckSuccess
+    }
+  }
+
   override protected def withNewChildrenInternal(
       newLeft: Expression, newRight: Expression): GetArrayItem =
     copy(child = newLeft, ordinal = newRight)
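
Aside: the ordinalNumber, toSQLType, and toSQLExpr helpers used in checkInputDataTypes() come from QueryErrorsBase, which the first hunk mixes into ExtractValue. Below is a minimal sketch of driving the new check directly against the Catalyst expression, assuming spark-catalyst with this commit on the classpath; the CheckSketch object and its literal inputs are hypothetical, for illustration only.

// Sketch only: exercises GetArrayItem.checkInputDataTypes() directly.
import org.apache.spark.sql.catalyst.expressions.{GetArrayItem, Literal}

object CheckSketch {
  def main(args: Array[String]): Unit = {
    // INT child instead of an array: the non-ArrayType branch reports
    // UNEXPECTED_INPUT_TYPE for the first parameter.
    println(GetArrayItem(Literal(1), Literal(0)).checkInputDataTypes())

    // Array child but a STRING ordinal: the integral-ordinal branch
    // reports UNEXPECTED_INPUT_TYPE for the second parameter.
    println(GetArrayItem(Literal.create(Seq(1, 2, 3)), Literal("0"))
      .checkInputDataTypes())

    // Well-typed inputs pass the check: TypeCheckSuccess.
    println(GetArrayItem(Literal.create(Seq(1, 2, 3)), Literal(0))
      .checkInputDataTypes())
  }
}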

sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out

Lines changed: 192 additions & 0 deletions
@@ -427,6 +427,198 @@ Project [get(array(1, 2, 3), -1) AS get(array(1, 2, 3), -1)#x]
 +- OneRowRelation
 
 
+-- !query
+select get(1, 0)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"1\"",
+    "inputType" : "\"INT\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"1[0]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 16,
+    "fragment" : "get(1, 0)"
+  } ]
+}
+
+
+-- !query
+select get(1, -1)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"1\"",
+    "inputType" : "\"INT\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"1[-1]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "get(1, -1)"
+  } ]
+}
+
+
+-- !query
+select get('1', 0)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"1\"",
+    "inputType" : "\"STRING\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"1[0]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 18,
+    "fragment" : "get('1', 0)"
+  } ]
+}
+
+
+-- !query
+select get('1', -1)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"1\"",
+    "inputType" : "\"STRING\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"1[-1]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 19,
+    "fragment" : "get('1', -1)"
+  } ]
+}
+
+
+-- !query
+select get(null, 0)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"NULL\"",
+    "inputType" : "\"VOID\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"NULL[0]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 19,
+    "fragment" : "get(null, 0)"
+  } ]
+}
+
+
+-- !query
+select get(null, -1)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"NULL\"",
+    "inputType" : "\"VOID\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"NULL[-1]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 20,
+    "fragment" : "get(null, -1)"
+  } ]
+}
+
+
+-- !query
+select get(null, null)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"NULL\"",
+    "inputType" : "\"VOID\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"NULL[NULL]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 22,
+    "fragment" : "get(null, null)"
+  } ]
+}
+
+
+-- !query
+select get(CAST (null AS string), 0)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "inputSql" : "\"CAST(NULL AS STRING)\"",
+    "inputType" : "\"STRING\"",
+    "paramIndex" : "first",
+    "requiredType" : "(\"ARRAY\")",
+    "sqlExpr" : "\"CAST(NULL AS STRING)[0]\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 36,
+    "fragment" : "get(CAST (null AS string), 0)"
+  } ]
+}
+
+
 -- !query
 select array_insert(array(1, 2, 3), 3, 4)
 -- !query analysis
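
Aside: well-typed calls are unaffected by this check. Only the argument types matter at analysis time, not the index value, since get() returns NULL for an out-of-range index (the get(array(1, 2, 3), -1) analyzer result in the context above still resolves to a Project). A spark-shell sketch, assuming a running session bound to spark:

// spark-shell sketch: the well-typed path still analyzes and runs.
// get() is 0-based; an out-of-bounds index (including -1) yields NULL
// at runtime rather than an analysis-time error.
spark.sql("select get(array(1, 2, 3), 0)").show()   // value: 1
spark.sql("select get(array(1, 2, 3), -1)").show()  // value: NULL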
