Skip to content

Commit

Permalink
Release spark connector 2.11.1 for spark 3.3
Browse files Browse the repository at this point in the history
  • Loading branch information
Mingli-Rui committed Dec 9, 2022
1 parent ac7c588 commit 995a23b
Show file tree
Hide file tree
Showing 5 changed files with 38 additions and 18 deletions.
11 changes: 7 additions & 4 deletions .github/workflows/IntegrationTest_2.12.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,10 @@ jobs:
SPARK_CONN_ENV_USE_COPY_UNLOAD: ${{ matrix.use_copy_unload }}
SPARK_CONN_ENV_INTERNAL_EXECUTE_QUERY_IN_SYNC_MODE: ${{ matrix.run_query_in_async }}

- uses: codecov/codecov-action@v2
if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
with:
fail_ci_if_error: true
# No need to track test code coverage for release tests;
# the code coverage report upload is unstable
# - uses: codecov/codecov-action@v2
# if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
# with:
# fail_ci_if_error: true

11 changes: 7 additions & 4 deletions .github/workflows/IntegrationTest_2.13.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,10 @@ jobs:
SPARK_CONN_ENV_USE_COPY_UNLOAD: ${{ matrix.use_copy_unload }}
SPARK_CONN_ENV_INTERNAL_EXECUTE_QUERY_IN_SYNC_MODE: ${{ matrix.run_query_in_async }}

- uses: codecov/codecov-action@v2
if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
with:
fail_ci_if_error: true
# No need to track test code coverage for release tests;
# the code coverage report upload is unstable
# - uses: codecov/codecov-action@v2
# if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
# with:
# fail_ci_if_error: true

11 changes: 7 additions & 4 deletions .github/workflows/IntegrationTest_gcp_2.12.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,10 @@ jobs:
SPARK_CONN_ENV_USE_COPY_UNLOAD: ${{ matrix.use_copy_unload }}
SPARK_CONN_ENV_INTERNAL_EXECUTE_QUERY_IN_SYNC_MODE: ${{ matrix.run_query_in_async }}

- uses: codecov/codecov-action@v2
if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
with:
fail_ci_if_error: true
# No need to track test code coverage for release tests;
# the code coverage report upload is unstable
# - uses: codecov/codecov-action@v2
# if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
# with:
# fail_ci_if_error: true

11 changes: 7 additions & 4 deletions .github/workflows/IntegrationTest_gcp_2.13.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,10 @@ jobs:
SPARK_CONN_ENV_USE_COPY_UNLOAD: ${{ matrix.use_copy_unload }}
SPARK_CONN_ENV_INTERNAL_EXECUTE_QUERY_IN_SYNC_MODE: ${{ matrix.run_query_in_async }}

- uses: codecov/codecov-action@v2
if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
with:
fail_ci_if_error: true
# No need to track test code coverage for release tests;
# the code coverage report upload is unstable
# - uses: codecov/codecov-action@v2
# if: matrix.use_copy_unload != 'true' || matrix.cloud_provider != 'gcp'
# with:
# fail_ci_if_error: true

12 changes: 10 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,19 @@ lazy val root = project.withId("spark-snowflake").in(file("."))
organization := "net.snowflake",
version := s"${sparkConnectorVersion}-spark_3.3",
scalaVersion := sys.props.getOrElse("SPARK_SCALA_VERSION", default = "2.12.11"),
// Spark 3.2 supports scala 2.12 and 2.13
// Spark 3.2/3.3 supports scala 2.12 and 2.13
crossScalaVersions := Seq("2.12.11", "2.13.9"),
javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
licenses += "Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0"),
credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
// Set up the GPG key for the release build from the environment variable GPG_HEX_CODE.
// The Jenkins build job must set it; otherwise, the release build will fail.
credentials += Credentials(
"GnuPG Key ID",
"gpg",
Properties.envOrNone("GPG_HEX_CODE").getOrElse("Jenkins_build_not_set_GPG_HEX_CODE"),
"ignored" // this field is ignored; passwords are supplied by pinentry
),
resolvers +=
"Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
libraryDependencies ++= Seq(
Expand Down Expand Up @@ -82,7 +90,7 @@ lazy val root = project.withId("spark-snowflake").in(file("."))
Test / javaOptions ++= Seq("-Xms1024M", "-Xmx4096M"),

// Release settings
usePgpKeyHex(Properties.envOrElse("GPG_SIGNATURE", "12345")),
// usePgpKeyHex(Properties.envOrElse("GPG_SIGNATURE", "12345")),
Global / pgpPassphrase := Properties.envOrNone("GPG_KEY_PASSPHRASE").map(_.toCharArray),

publishMavenStyle := true,
Expand Down

0 comments on commit 995a23b

Please sign in to comment.