Update dependency com.cognite:cognite-sdk-scala to v2.11.770 #857

Merged: 5 commits, Oct 31, 2023
4 changes: 2 additions & 2 deletions build.sbt
@@ -9,7 +9,7 @@ val sparkVersion = "3.3.3"
val circeVersion = "0.14.6"
val sttpVersion = "3.5.2"
val Specs2Version = "4.20.2"
-val cogniteSdkVersion = "2.10.769"
+val cogniteSdkVersion = "2.12.771"

val prometheusVersion = "0.16.0"
val log4sVersion = "1.10.0"
@@ -26,7 +26,7 @@ lazy val commonSettings = Seq(
organization := "com.cognite.spark.datasource",
organizationName := "Cognite",
organizationHomepage := Some(url("https://cognite.com")),
-version := "3.2." + patchVersion,
+version := "3.3." + patchVersion,
isSnapshot := patchVersion.endsWith("-SNAPSHOT"),
crossScalaVersions := supportedScalaVersions,
semanticdbEnabled := true,
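The bumped cogniteSdkVersion constant is normally threaded into libraryDependencies further down in build.sbt; that line is outside the visible hunk, so the snippet below is only a minimal sketch of the assumed wiring, using the group and artifact from the PR title.

// Sketch, not the actual build.sbt: assumes the conventional sbt pattern where
// the version vals above feed libraryDependencies, so bumping cogniteSdkVersion
// is the single place the SDK version changes.
libraryDependencies ++= Seq(
  "com.cognite" %% "cognite-sdk-scala" % cogniteSdkVersion,
  "io.circe" %% "circe-core" % circeVersion // for example
)
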
14 changes: 11 additions & 3 deletions src/main/scala/cognite/spark/v1/DefaultSource.scala
@@ -55,6 +55,7 @@ class DefaultSource
new SequenceRowsRelation(config, sequenceId)(sqlContext)
}

+@deprecated("wdl support is deprecated", since = "0")
private def createWellDataLayer(
parameters: Map[String, String],
config: RelationConfig,
@@ -174,7 +175,7 @@ class DefaultSource
case FlexibleDataModelRelationFactory.ResourceType =>
createFlexibleDataModelRelation(parameters, config, sqlContext)
case "welldatalayer" =>
-createWellDataLayer(parameters, config, sqlContext)
+createWellDataLayer(parameters, config, sqlContext): @annotation.nowarn
case _ => sys.error("Unknown resource type: " + resourceType)
}
}
@@ -241,7 +242,7 @@ class DefaultSource
case FlexibleDataModelRelationFactory.ResourceType =>
createFlexibleDataModelRelation(parameters, config, sqlContext)
case "welldatalayer" =>
-createWellDataLayer(parameters, config, sqlContext)
+createWellDataLayer(parameters, config, sqlContext): @annotation.nowarn
case _ => sys.error(s"Resource type $resourceType does not support save()")
}
val batchSizeDefault = relation match {
@@ -346,7 +347,14 @@ object DefaultSource {
}
clientId <- parameters.get("clientId")
clientSecret <- parameters.get("clientSecret")
-clientCredentials = OAuth2.ClientCredentials(tokenUri, clientId, clientSecret, scopes, audience)
+project <- parameters.get("project")
+clientCredentials = OAuth2.ClientCredentials(
+  tokenUri,
+  clientId,
+  clientSecret,
+  scopes,
+  project,
+  audience)
} yield CdfSparkAuth.OAuth2ClientCredentials(clientCredentials)

val session = for {
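The last hunk is the behavioural change for users of this class: the OAuth2 client credentials are now built inside an Option for-comprehension that also requires a project parameter, so the whole configuration resolves to None if project is missing. A minimal, self-contained sketch of that effect, using a made-up parameter map (only the option names clientId, clientSecret and project are taken from the diff):

// Sketch of the Option semantics above; not the connector's actual code path.
val parameters = Map(
  "clientId" -> "my-client-id",
  "clientSecret" -> "my-client-secret"
  // "project" deliberately left out
)

val maybeCredentials = for {
  clientId <- parameters.get("clientId")
  clientSecret <- parameters.get("clientSecret")
  project <- parameters.get("project") // now also needed for credentials to be built
} yield (clientId, clientSecret, project)

assert(maybeCredentials.isEmpty) // without "project", no OAuth2 client credentials are assembled

This is why DefaultSourceTest below now includes "value-Project" in the expected credentials.
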
4 changes: 4 additions & 0 deletions src/main/scala/cognite/spark/v1/wdl/WdlModels.scala
@@ -3,24 +3,28 @@ package cognite.spark.v1.wdl
import cognite.spark.v1.CdfSparkException
import io.circe.{Json, JsonObject}

+@deprecated("wdl support is deprecated", since = "0")
case class WdlModel(
shortName: String,
ingest: Option[WdlIngestDefinition],
retrieve: WdlRetrieveDefinition
)

+@deprecated("wdl support is deprecated", since = "0")
case class WdlIngestDefinition(
schemaName: String,
url: String
)

+@deprecated("wdl support is deprecated", since = "0")
case class WdlRetrieveDefinition(
schemaName: String,
url: String,
isGet: Boolean = false,
transformBody: JsonObject => JsonObject = it => it
)

+@deprecated("wdl support is deprecated", since = "0")
object WdlModels {
val models: Seq[WdlModel] = Seq(
WdlModel(
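Every WDL model is annotated @deprecated, while the remaining internal call sites (see DefaultSource above) are silenced with an @annotation.nowarn ascription rather than removed. A self-contained sketch of that pattern, with illustrative names that are not from the codebase:

// Illustrative only: a deprecated private helper plus a call site that keeps
// compiling warning-free (e.g. under -Xfatal-warnings) via the nowarn ascription.
// @nowarn needs Scala 2.13.2+ or scala-collection-compat on 2.12.
object DeprecationExample {
  @deprecated("wdl support is deprecated", since = "0")
  private def createLegacyRelation(name: String): String =
    s"legacy-relation-$name"

  def createRelation(resourceType: String): String = resourceType match {
    case "welldatalayer" =>
      // Calling a deprecated method warns; the ascription suppresses it at this site only.
      createLegacyRelation(resourceType): @annotation.nowarn
    case other => sys.error("Unknown resource type: " + other)
  }
}
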
@@ -12,6 +12,7 @@ import org.apache.spark.{Partition, SparkContext, TaskContext}

import scala.collection.AbstractIterator

+@deprecated("wdl support is deprecated", since = "0")
class WellDataLayerRDD(
@transient override val sparkContext: SparkContext,
val schema: StructType,
@@ -7,6 +7,7 @@ import org.apache.spark.sql.sources.TableScan
import org.apache.spark.sql.types.{DataType, StructType}
import org.apache.spark.sql.{Row, SQLContext}

+@deprecated("wdl support is deprecated", since = "0")
class WellDataLayerRelation(
config: RelationConfig,
model: String
1 change: 1 addition & 0 deletions src/test/scala/cognite/spark/v1/DefaultSourceTest.scala
@@ -75,6 +75,7 @@ class DefaultSourceTest extends WordSpec with Matchers {
"value-ClientId",
"value-ClientSecret",
List("value-Scopes"),
+"value-Project",
Some("value-Audience")))
)
}
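For end users, the visible consequence of the credentials change is that the project option is now also forwarded into the OAuth2 client credentials. The sketch below is a hedged example of a reader configuration: the format name "cognite.spark.v1", the "type" option, and the spelling of the non-credential options are assumptions, not facts taken from this diff.

// Assumed usage sketch; only clientId, clientSecret and project are confirmed
// by the DefaultSource change above, everything else is illustrative.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()

val assets = spark.read
  .format("cognite.spark.v1")          // assumed short name of this data source
  .option("type", "assets")            // assumed resource selector
  .option("tokenUri", "https://login.example.com/oauth2/token")
  .option("clientId", sys.env("CLIENT_ID"))
  .option("clientSecret", sys.env("CLIENT_SECRET"))
  .option("scopes", "https://api.cognitedata.com/.default")
  .option("project", "my-cdf-project") // now also used to build the OAuth2 credentials
  .load()
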
7 changes: 5 additions & 2 deletions src/test/scala/cognite/spark/v1/SparkTest.scala
@@ -58,6 +58,7 @@ trait SparkTest {
clientSecret = OIDCWrite.clientSecret,
scopes = List(OIDCWrite.scopes),
audience = Some(OIDCWrite.audience),
+cdfProjectName = OIDCWrite.project,
)
implicit val sttpBackend: SttpBackend[IO, Any] = CdpConnector.retryingSttpBackend(15, 30)

@@ -120,7 +121,8 @@ trait SparkTest {
tokenUri = uri"$readTokenUri",
clientId = readClientId,
clientSecret = readClientSecret,
-scopes = List("https://api.cognitedata.com/.default")
+scopes = List("https://api.cognitedata.com/.default"),
+cdfProjectName = readProject
)

def dataFrameReaderUsingOidc: DataFrameReader =
@@ -159,7 +161,8 @@ trait SparkTest {
tokenUri = bluefieldTokenUri,
clientId = bluefieldClientId,
clientSecret = bluefieldClientSecret,
-scopes = List("https://bluefield.cognitedata.com/.default")
+scopes = List("https://bluefield.cognitedata.com/.default"),
+cdfProjectName = "extractor-bluefield-testing"
)

val authProvider =
1 change: 1 addition & 0 deletions src/test/scala/cognite/spark/v1/wdl/WdlModelsTest.scala
@@ -3,6 +3,7 @@ package cognite.spark.v1.wdl
import cognite.spark.v1.{CdfSparkException, SparkTest}
import org.scalatest.{FlatSpec, Matchers}

+@deprecated("wdl support is deprecated", since = "0")
class WdlModelsTest extends FlatSpec with Matchers with SparkTest {
it should "get from ingestion name" in {
val wellSource = WdlModels.fromIngestionSchemaName("NptIngestion")