
Commit

sequences should support update
Jacob-Eliat-Eliat committed Nov 15, 2024
1 parent 9841355 commit 4fbf3b4
Showing 2 changed files with 3 additions and 2 deletions.
2 changes: 1 addition & 1 deletion build.sbt
@@ -46,7 +46,7 @@ lazy val commonSettings = Seq(
crossScalaVersions := supportedScalaVersions,
semanticdbEnabled := true,
semanticdbVersion := scalafixSemanticdb.revision,
scalaVersion := scala212, // default to Scala 2.12
scalaVersion := scala213, // default to Scala 2.13
// handle cross plugin https://github.com/stringbean/sbt-dependency-lock/issues/13
dependencyLockFile := { baseDirectory.value / s"build.scala-${CrossVersion.partialVersion(scalaVersion.value) match { case Some((2, n)) => s"2.$n" }}.sbt.lock" },
description := "Spark data source for the Cognite Data Platform.",
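For context, a minimal sketch of what the changed default implies, assuming the usual sbt layout where scala212, scala213, and supportedScalaVersions are defined near the top of build.sbt (the names match the hunk above, but the patch versions here are assumptions, not taken from this commit):

```scala
// Hypothetical build.sbt fragment; the real build defines these elsewhere,
// and the exact patch versions below are guesses.
lazy val scala212 = "2.12.20"
lazy val scala213 = "2.13.15"
lazy val supportedScalaVersions = List(scala212, scala213)

// With scalaVersion := scala213, CrossVersion.partialVersion(scalaVersion.value)
// evaluates to Some((2, 13)), so the dependencyLockFile setting above resolves to
// baseDirectory.value / "build.scala-2.13.sbt.lock" for a default (non-cross) build.
```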
3 changes: 2 additions & 1 deletion src/main/scala/cognite/spark/v1/SequencesRelation.scala
@@ -223,14 +223,15 @@ object SequenceRelation
with ReadSchema
with InsertSchema
with DeleteWithIdSchema
with UpdateSchema
with NamedRelation {
override val name: String = "sequences"
import cognite.spark.compiletime.macros.StructTypeEncoderMacro._

override val upsertSchema: StructType = structType[SequenceUpsertSchema]()
override val insertSchema: StructType = structType[SequenceInsertSchema]()
override val readSchema: StructType = structType[SequenceReadSchema]()

override val updateSchema: StructType = upsertSchema
}

final case class SequenceColumnUpsertSchema(
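A hedged usage sketch of what this change enables on the Spark side: with UpdateSchema mixed in and updateSchema aliased to upsertSchema, the "sequences" resource type should accept update writes through the data source's usual onconflict option, with update rows carrying the same columns as upserts. The DataFrame contents and column choice below are illustrative assumptions, not taken from this commit:

```scala
// Hypothetical example: update existing sequences by externalId.
// Columns are assumed to follow SequenceUpsertSchema; only the fields being
// changed need non-null values. (Authentication options omitted for brevity.)
val sequenceUpdates = spark
  .createDataFrame(Seq(("seq-external-id-1", "Renamed sequence")))
  .toDF("externalId", "name")

sequenceUpdates.write
  .format("cognite.spark.v1")
  .option("type", "sequences")
  .option("onconflict", "update") // now backed by updateSchema = upsertSchema
  .save()
```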
