
Commit 73b50e9

cty123 authored and LuciferYang committed
[SPARK-54205][CONNECT] Supports Decimal type data in SparkConnectResultSet
### What changes were proposed in this pull request?

Spark Connect already supports the JDBC protocol for a few commonly used SQL data types, but it is currently missing support for Decimal data, which is also very commonly used (for example, to store monetary values). This change adds support for the Decimal data type.

### Why are the changes needed?

Right now, a user can only read Decimal data from SQL by converting the value to a string and then parsing that string into a Java BigDecimal object. Since the JDBC driver is already able to fetch the data as a Java BigDecimal, we can avoid converting it back and forth and simply pass through the value obtained from the raw JDBC result set.

### Does this PR introduce _any_ user-facing change?

It's part of a new feature under Spark Connect JDBC support.

### How was this patch tested?

I have created a new unit test named **'get decimal type'** that covers these changes. The test case also aligns with the existing tests for fetching other data types.

### Was this patch authored or co-authored using generative AI tooling?

No

Closes #52947 from cty123/cty123/support-spark-connect-decimaltype.

Lead-authored-by: cty123 <[email protected]>
Co-authored-by: cty <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
1 parent 6466ed5 · commit 73b50e9
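For context, here is a minimal caller-side sketch of what this change enables. The `jdbc:sc://localhost:15002` URL and the object name are assumptions for illustration only; the query shape mirrors the new unit test.

```scala
import java.math.BigDecimal
import java.sql.{Connection, DriverManager, ResultSet}

object DecimalReadExample extends App {
  // The connection URL is an assumption for illustration; adjust it to your deployment.
  val conn: Connection = DriverManager.getConnection("jdbc:sc://localhost:15002")
  try {
    val rs: ResultSet = conn.createStatement()
      .executeQuery("SELECT CAST('123.45' AS DECIMAL(10,2)) AS amount")
    while (rs.next()) {
      // Before this change: callers had to round-trip through a string, e.g.
      //   val amount = new BigDecimal(rs.getString("amount"))
      // After this change: the driver returns java.math.BigDecimal directly.
      val amount: BigDecimal = rs.getBigDecimal("amount")
      if (!rs.wasNull()) println(amount)
    }
  } finally {
    conn.close()
  }
}
```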

File tree

3 files changed: +55 -4 lines changed


sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala

Lines changed: 9 additions & 3 deletions
@@ -257,11 +257,17 @@ class SparkConnectResultSet(
   override def getCharacterStream(columnLabel: String): Reader =
     throw new SQLFeatureNotSupportedException
 
-  override def getBigDecimal(columnIndex: Int): java.math.BigDecimal =
-    throw new SQLFeatureNotSupportedException
+  override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = {
+    if (currentRow.isNullAt(columnIndex - 1)) {
+      _wasNull = true
+      return null
+    }
+    _wasNull = false
+    currentRow.getDecimal(columnIndex - 1)
+  }
 
   override def getBigDecimal(columnLabel: String): java.math.BigDecimal =
-    throw new SQLFeatureNotSupportedException
+    getBigDecimal(findColumn(columnLabel))
 
   override def isBeforeFirst: Boolean = {
     checkOpen()
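The null handling above follows the standard JDBC contract: a SQL NULL comes back as a Java `null` and is also reported through `wasNull()`. A minimal sketch of what a caller sees, assuming `conn` is an already-open connection backed by the Spark Connect JDBC driver (hypothetical setup):

```scala
import java.sql.Connection

// Sketch of the caller-side NULL contract for the new getBigDecimal methods.
def readNullableDecimal(conn: Connection): Unit = {
  val rs = conn.createStatement()
    .executeQuery("SELECT CAST(NULL AS DECIMAL(10,2)) AS amount")
  assert(rs.next())
  val amount = rs.getBigDecimal("amount") // SQL NULL comes back as a Java null
  assert(amount == null && rs.wasNull())  // and wasNull() reports the last read was NULL
}
```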

sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala

Lines changed: 13 additions & 1 deletion
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.connect.client.jdbc.util
 
 import java.lang.{Boolean => JBoolean, Byte => JByte, Double => JDouble, Float => JFloat, Long => JLong, Short => JShort}
+import java.math.{BigDecimal => JBigDecimal}
 import java.sql.{Array => _, _}
 
 import org.apache.spark.sql.types._
@@ -34,6 +35,7 @@ private[jdbc] object JdbcTypeUtils {
     case FloatType => Types.FLOAT
     case DoubleType => Types.DOUBLE
     case StringType => Types.VARCHAR
+    case _: DecimalType => Types.DECIMAL
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
   }
@@ -48,12 +50,14 @@ private[jdbc] object JdbcTypeUtils {
     case FloatType => classOf[JFloat].getName
     case DoubleType => classOf[JDouble].getName
     case StringType => classOf[String].getName
+    case _: DecimalType => classOf[JBigDecimal].getName
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
   }
 
   def isSigned(field: StructField): Boolean = field.dataType match {
-    case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType => true
+    case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType |
+        _: DecimalType => true
     case NullType | BooleanType | StringType => false
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
@@ -69,6 +73,7 @@ private[jdbc] object JdbcTypeUtils {
     case FloatType => 7
     case DoubleType => 15
     case StringType => 255
+    case DecimalType.Fixed(p, _) => p
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
   }
@@ -77,6 +82,7 @@ private[jdbc] object JdbcTypeUtils {
     case FloatType => 7
     case DoubleType => 15
     case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | StringType => 0
+    case DecimalType.Fixed(_, s) => s
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
   }
@@ -90,6 +96,12 @@ private[jdbc] object JdbcTypeUtils {
     case DoubleType => 24
     case StringType =>
       getPrecision(field)
+    // precision + negative sign + leading zero + decimal point, like DECIMAL(5,5) = -0.12345
+    case DecimalType.Fixed(p, s) if p == s => p + 3
+    // precision + negative sign, like DECIMAL(5,0) = -12345
+    case DecimalType.Fixed(p, s) if s == 0 => p + 1
+    // precision + negative sign + decimal point, like DECIMAL(5,2) = -123.45
+    case DecimalType.Fixed(p, _) => p + 2
     case other =>
       throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.")
   }
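The three DecimalType cases added to the display-size computation encode a simple width rule based on precision and scale. A standalone restatement of that rule (not the driver's actual code), checked against the sample literals from the comments above:

```scala
// Column display size for DECIMAL(precision, scale), as introduced in the diff above.
def decimalDisplaySize(precision: Int, scale: Int): Int =
  if (precision == scale) precision + 3 // sign + leading zero + point, e.g. DECIMAL(5,5): -0.12345
  else if (scale == 0) precision + 1    // sign only, e.g. DECIMAL(5,0): -12345
  else precision + 2                    // sign + decimal point, e.g. DECIMAL(5,2): -123.45

// Quick check against the widths of the sample literals:
assert(decimalDisplaySize(5, 5) == "-0.12345".length) // 8
assert(decimalDisplaySize(5, 0) == "-12345".length)   // 6
assert(decimalDisplaySize(5, 2) == "-123.45".length)  // 7
```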

sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala

Lines changed: 33 additions & 0 deletions
@@ -215,4 +215,37 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess
       assert(metaData.getColumnDisplaySize(1) === 255)
     }
   }
+
+  test("get decimal type") {
+    Seq(
+      ("123.45", 37, 2, 39),
+      ("-0.12345", 5, 5, 8),
+      ("-0.12345", 6, 5, 8),
+      ("-123.45", 5, 2, 7),
+      ("12345", 5, 0, 6),
+      ("-12345", 5, 0, 6)
+    ).foreach {
+      case (value, precision, scale, expectedColumnDisplaySize) =>
+        val decimalType = s"DECIMAL($precision,$scale)"
+        withExecuteQuery(s"SELECT cast('$value' as $decimalType)") { rs =>
+          assert(rs.next())
+          assert(rs.getBigDecimal(1) === new java.math.BigDecimal(value))
+          assert(!rs.wasNull)
+          assert(!rs.next())
+
+          val metaData = rs.getMetaData
+          assert(metaData.getColumnCount === 1)
+          assert(metaData.getColumnName(1) === s"CAST($value AS $decimalType)")
+          assert(metaData.getColumnLabel(1) === s"CAST($value AS $decimalType)")
+          assert(metaData.getColumnType(1) === Types.DECIMAL)
+          assert(metaData.getColumnTypeName(1) === decimalType)
+          assert(metaData.getColumnClassName(1) === "java.math.BigDecimal")
+          assert(metaData.isSigned(1) === true)
+          assert(metaData.getPrecision(1) === precision)
+          assert(metaData.getScale(1) === scale)
+          assert(metaData.getColumnDisplaySize(1) === expectedColumnDisplaySize)
+          assert(metaData.getColumnDisplaySize(1) >= value.size)
+        }
+    }
+  }
 }
