Update dependency scala to v2.13.15 (#966)
* Update dependency scala to v2.13.15

* Update build.sbt

* bump & sbt +{,macroSub/,structType/}dependencyLockWrite

* silence

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Dmitry Ivankov <[email protected]>
renovate[bot] and dmivankov authored Nov 20, 2024
1 parent ebd4b36 commit caa662d
Showing 10 changed files with 149 additions and 122 deletions.
5 changes: 3 additions & 2 deletions build.sbt
@@ -4,7 +4,7 @@ import scala.xml.{Node => XmlNode, NodeSeq => XmlNodeSeq, _}
import scala.xml.transform.{RewriteRule, RuleTransformer}

val scala212 = "2.12.19"
val scala213 = "2.13.14"
val scala213 = "2.13.15"
val supportedScalaVersions = List(scala212, scala213)
val sparkVersion = "3.3.4"
val circeVersion = "0.14.9"
@@ -57,7 +57,8 @@ lazy val commonSettings = Seq(
// and to avoid a dependency on scala-collection-compat
case Some((2, 13)) => Seq(
"-Wconf:src=src/test/scala/cognite/spark/v1/SparkTest.scala&cat=deprecation:i",
"-Wconf:src=src/test/scala/.*&cat=other-pure-statement:i"
"-Wconf:src=src/test/scala/.*&cat=other-pure-statement:i",
"-Wconf:src=src/test/scala/.*&msg=unused value of type org.scalatest.Assertion:s"
)
case Some((2, 12)) => Seq(
"-Wconf:src=src/test/scala/.*&cat=unused:i"
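The new -Wconf entry above silences warnings of the form "unused value of type org.scalatest.Assertion" in test sources. The sketch below is a hypothetical spec showing the kind of code that triggers that warning, assuming a non-unit-statement lint (such as -Wnonunit-statement, for example as enabled via the sbt-tpolecat plugin this build already uses) is in effect:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ExampleSpec extends AnyFlatSpec with Matchers {
  "a List" should "have the expected size and head" in {
    val xs = List(1, 2, 3)
    // shouldBe returns an org.scalatest.Assertion; this result is discarded in
    // statement position, so the lint reports
    // "unused value of type org.scalatest.Assertion".
    xs.length shouldBe 3
    // The final expression's Assertion is the value of the test body, so it is not flagged.
    xs.headOption shouldBe Some(1)
  }
}

Scoping the silencing rule to src/test/scala keeps main sources subject to the warning while letting existing assertion-heavy tests compile cleanly.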
10 changes: 5 additions & 5 deletions build.scala-2.13.sbt.lock
@@ -1,6 +1,6 @@
{
"lockVersion" : 1,
"timestamp" : "2024-11-13T11:30:56.507065520Z",
"timestamp" : "2024-11-20T12:59:46.331288696Z",
"configurations" : [
"compile",
"optional",
@@ -2569,11 +2569,11 @@
{
"org" : "org.scala-lang",
"name" : "scala-library",
"version" : "2.13.14",
"version" : "2.13.15",
"artifacts" : [
{
"name" : "scala-library.jar",
"hash" : "sha1:f8b4afe89abe48ca670f620c7da89b71f93e6546"
"hash" : "sha1:ed6f1d58968b16c5f9067d5cac032d952552de58"
}
],
"configurations" : [
@@ -2586,11 +2586,11 @@
{
"org" : "org.scala-lang",
"name" : "scala-reflect",
"version" : "2.13.14",
"version" : "2.13.15",
"artifacts" : [
{
"name" : "scala-reflect.jar",
"hash" : "sha1:8e275fefb2a01e178db2cdfebb2181062a790b82"
"hash" : "sha1:355927b10366563a8f1b56c1f34ff376f2f7c8c5"
}
],
"configurations" : [
25 changes: 19 additions & 6 deletions macro/build.scala-2.13.sbt.lock
@@ -1,6 +1,6 @@
{
"lockVersion" : 1,
"timestamp" : "2024-11-13T11:31:01.766754723Z",
"timestamp" : "2024-11-20T12:59:52.180435270Z",
"configurations" : [
"compile",
"optional",
@@ -2083,28 +2083,41 @@
{
"org" : "org.scala-lang",
"name" : "scala-library",
"version" : "2.13.14",
"version" : "2.13.15",
"artifacts" : [
{
"name" : "scala-library.jar",
"hash" : "sha1:f8b4afe89abe48ca670f620c7da89b71f93e6546"
"hash" : "sha1:ed6f1d58968b16c5f9067d5cac032d952552de58"
}
],
"configurations" : [
"compile",
"provided",
"runtime",
"test"
]
},
{
"org" : "org.scala-lang",
"name" : "scala-library",
"version" : "2.13.8",
"artifacts" : [
{
"name" : "scala-library.jar",
"hash" : "sha1:5a865f03a794b27e6491740c4c419a19e4511a3d"
}
],
"configurations" : [
"provided"
]
},
{
"org" : "org.scala-lang",
"name" : "scala-reflect",
"version" : "2.13.14",
"version" : "2.13.8",
"artifacts" : [
{
"name" : "scala-reflect.jar",
"hash" : "sha1:8e275fefb2a01e178db2cdfebb2181062a790b82"
"hash" : "sha1:994b004d041b18724ec298a135c37e7817d369ec"
}
],
"configurations" : [
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version=1.9.9
sbt.version=1.10.3
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -6,6 +6,6 @@ addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2-1")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.12.0")
addSbtPlugin("au.com.onegeek" %% "sbt-dotenv" % "2.1.233")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.2.0")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.12.1")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.13.0")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.4.4")
addSbtPlugin("software.purpledragon" % "sbt-dependency-lock" % "1.5.1")
8 changes: 4 additions & 4 deletions src/test/scala/cognite/spark/v1/FileContentRelationTest.scala
@@ -122,10 +122,10 @@ class FileContentRelationTest extends FlatSpec with Matchers with SparkTest wit
val result = spark.sqlContext.sql(s"select * from filecontent").collect()
result.map(_.toSeq.toList) should contain theSameElementsAs
Array(
List(30, "Alice", null),
List(25, "Bob", null),
List(35, "Charlie", null),
List(35, "Charlie2", "test")
List[Any](30, "Alice", null),
List[Any](25, "Bob", null),
List[Any](35, "Charlie", null),
List[Any](35, "Charlie2", "test")
)
}

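The List(...) to List[Any](...) change above (and the matching Set[Any], Array[Any], and Map[Any, Any] annotations in the test diffs that follow) spells out the element type for collections that mix numbers, strings, and null, so the compiler no longer has to infer Any on its own. A minimal sketch with hypothetical values, assuming an infer-Any lint such as -Xlint:infer-any is enabled:

// Mixing Int, String and null pushes the least upper bound of the elements up to Any.
val inferred = List(30, "Alice", null)      // inferred as List[Any]; the lint warns that a type was inferred to be `Any`
val explicit = List[Any](30, "Alice", null) // same value, but the explicit type argument states the intent, so no warning

Writing the type argument explicitly keeps these tests warning-free, which matters when warnings are escalated to errors in the build.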
16 changes: 8 additions & 8 deletions src/test/scala/cognite/spark/v1/RawTableRelationTest.scala
@@ -313,7 +313,7 @@ class RawTableRelationTest

collectToSet[java.sql.Timestamp](dfWithManylastUpdatedTime.select($"lastUpdatedTime"))
collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"_lastUpdatedTime")) should equal(
Set(null, 2))
Set[Any](null, 2))
collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"___lastUpdatedTime")) should equal(
Set(11, 22))
collectToSet[JavaLong](dfWithManylastUpdatedTime.select($"____lastUpdatedTime")) should equal(
@@ -343,7 +343,7 @@ class RawTableRelationTest
val (columnNames2, unRenamed2) = prepareForInsert(dfWithManylastUpdatedTime)
columnNames2.toSet should equal(
Set("lastUpdatedTime", "__lastUpdatedTime", "___lastUpdatedTime", "value"))
collectToSet[JavaLong](unRenamed2.select($"lastUpdatedTime")) should equal(Set(null, 2))
collectToSet[JavaLong](unRenamed2.select($"lastUpdatedTime")) should equal(Set[Any](null, 2))
collectToSet[JavaLong](unRenamed2.select($"__lastUpdatedTime")) should equal(Set(11, 22))
collectToSet[JavaLong](unRenamed2.select($"___lastUpdatedTime")) should equal(Set(111, 222))
}
@@ -836,7 +836,7 @@ class RawTableRelationTest
dfWithEmptyStringInByteField
.collect()
.map(_.getAs[Any]("byte"))
.toSet shouldBe Set(null, 1.toByte)
.toSet shouldBe Set[Any](null, 1.toByte)
}

it should "handle empty string as null for Short type" in {
@@ -846,7 +846,7 @@
dfWithEmptyStringInShortField
.collect()
.map(_.getAs[Any]("short"))
.toSet shouldBe Set(null, 12.toShort)
.toSet shouldBe Set[Any](null, 12.toShort)
}

it should "handle empty string as null for Integer type" in {
@@ -856,7 +856,7 @@
dfWithEmptyStringInIntegerField
.collect()
.map(_.getAs[Any]("integer"))
.toSet shouldBe Set(null, 123)
.toSet shouldBe Set[Any](null, 123)
}

it should "handle empty string as null for Long type" in {
@@ -866,7 +866,7 @@
dfWithEmptyStringInLongField
.collect()
.map(_.getAs[Any]("long"))
.toSet shouldBe Set(null, 12345L)
.toSet shouldBe Set[Any](null, 12345L)
}

it should "handle empty string as null for Double type" in {
@@ -876,7 +876,7 @@
dfWithEmptyStringInDoubleField
.collect()
.map(_.getAs[Any]("num"))
.toSet shouldBe Set(null, 12.3)
.toSet shouldBe Set[Any](null, 12.3)
}

it should "handle empty string as null for Boolean type" in {
@@ -889,7 +889,7 @@
dfWithEmptyStringInBooleanField
.collect()
.map(_.getAs[Any]("bool"))
.toSet shouldBe Set(null, true, false)
.toSet shouldBe Set[Any](null, true, false)
}

it should "fail reasonably on invalid types" in {
22 changes: 11 additions & 11 deletions src/test/scala/cognite/spark/v1/SparkSchemaHelperTest.scala
@@ -60,19 +60,19 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
}

it should "construct optional type from Row of null" in {
val r = new GenericRowWithSchema(Array(null, null, null, null, null, null, null, null), structType[TestTypeOption]())
val r = new GenericRowWithSchema(Array[Any](null, null, null, null, null, null, null, null), structType[TestTypeOption]())
fromRow[TestTypeOption](r) should be(
TestTypeOption(None, None, None, None, None, None, None, None))
}

it should "construct optional type from Row of map values that can be null" in {
val r = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> "a"), null, null, null), structType[TestTypeOption]())
val r = new GenericRowWithSchema(Array[Any](null, null, null, null, Map("foo" -> "row", "bar" -> "a"), null, null, null), structType[TestTypeOption]())
fromRow[TestTypeOption](r) should be(
TestTypeOption(None, None, None, None, Some(Map("foo" -> "row", "bar" -> "a")), None, None, None))
}

it should "construct optional type from Row of seq values that can be null" in {
val r = new GenericRowWithSchema(Array(null, null, null, null, null, null, Seq(20L, null), null), structType[TestTypeOption]())
val r = new GenericRowWithSchema(Array[Any](null, null, null, null, null, null, Seq[Any](20L, null), null), structType[TestTypeOption]())
fromRow[TestTypeOption](r) should be(
TestTypeOption(None, None, None, None, None, None, Some(Seq(Some(20), None)), None))
}
@@ -96,13 +96,13 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
}

it should "ignore null in map" in {
val x = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> null), null, null, null), structType[TestTypeOption]())
val x = new GenericRowWithSchema(Array[Any](null, null, null, null, Map("foo" -> "row", "bar" -> null), null, null, null), structType[TestTypeOption]())
val row = fromRow[TestTypeOption](x)
row.x.get shouldBe Map("foo" -> "row")
}

it should "ignore missing fields" in {
val x = new GenericRowWithSchema(Array(1, 2, null), structType[TestTypeOptionalField]())
val x = new GenericRowWithSchema(Array[Any](1, 2, null), structType[TestTypeOptionalField]())
val row = fromRow[TestTypeOption](x)
row.a shouldBe Some(1)
row.b shouldBe Some(2)
Expand All @@ -112,7 +112,7 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat

it should "correctly return OptionalField" in {
val x = new GenericRowWithSchema(
Array(1, null),
Array[Any](1, null),
StructType(Seq(
StructField("a", DataTypes.IntegerType),
StructField("b", DataTypes.IntegerType, nullable = true)
@@ -124,34 +124,34 @@ class SparkSchemaHelperTest extends FlatSpec with ParallelTestExecution with Mat
}

it should "fail nicely on different type in map" in {
val x = new GenericRowWithSchema(Array(null, null, null, null, Map("foo" -> "row", "bar" -> 1), null, null, null), structType[TestTypeOption]())
val x = new GenericRowWithSchema(Array[Any](null, null, null, null, Map[Any, Any]("foo" -> "row", "bar" -> 1), null, null, null), structType[TestTypeOption]())
val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeOption](x) }
ex.getMessage shouldBe "Map with string values was expected, but '1' of type Int was found (under key 'bar' on row [null,null,null,null,Map(foo -> row, bar -> 1),null,null,null])"
}

it should "fail nicely on type mismatch" in {
val x = new GenericRowWithSchema(Array("shouldBeInt", 2.toDouble, 3.toByte,
val x = new GenericRowWithSchema(Array[Any]("shouldBeInt", 2.toDouble, 3.toByte,
4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
ex.getMessage shouldBe "Column 'a' was expected to have type Int, but 'shouldBeInt' of type String was found (on row [shouldBeInt,2.0,3,4.0,Map(foo -> bar),5,List(10),foo])."
}

it should "fail nicely on unexpected NULL in int" in {
val x = new GenericRowWithSchema(Array(null, 2.toDouble, 3.toByte,
val x = new GenericRowWithSchema(Array[Any](null, 2.toDouble, 3.toByte,
4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
ex.getMessage shouldBe "Column 'a' was expected to have type Int, but NULL was found (on row [null,2.0,3,4.0,Map(foo -> bar),5,List(10),foo])."
}

it should "fail nicely on unexpected NULL in string" in {
val x = new GenericRowWithSchema(Array(1, 2.toDouble, 3.toByte,
val x = new GenericRowWithSchema(Array[Any](1, 2.toDouble, 3.toByte,
4.toFloat, Map("foo" -> "bar"), 5.toLong, Seq[Long](10), null), structType[TestTypeBasic]())
val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
ex.getMessage shouldBe "Column 's' was expected to have type String, but NULL was found (on row [1,2.0,3,4.0,Map(foo -> bar),5,List(10),null])."
}

it should "fail nicely on unexpected NULL in map" in {
val x = new GenericRowWithSchema(Array(1, 2.toDouble, 3.toByte,
val x = new GenericRowWithSchema(Array[Any](1, 2.toDouble, 3.toByte,
4.toFloat, null, 5.toLong, Seq[Long](10), "foo"), structType[TestTypeBasic]())
val ex = intercept[CdfSparkIllegalArgumentException] { fromRow[TestTypeBasic](x) }
ex.getMessage shouldBe "Column 'x' was expected to have type Map[String,String], but NULL was found (on row [1,2.0,3,4.0,null,5,List(10),foo])."
