Upgrade to latest versions (#219)
* Upgrade to latest versions

Address #217 and #213

* Remove bad options for some Scala versions

* Simplify ci-cd

* Remove other conflicting scalacOptions

* Better system for the scalacOptions

* More fixes

* More fixes

* Try again

* Add sbt cache
leobenkel authored Dec 19, 2022
1 parent bb7ffac commit 1a73ff6
Showing 18 changed files with 183 additions and 124 deletions.
82 changes: 73 additions & 9 deletions .github/workflows/ci.yml
@@ -1,6 +1,8 @@
-name: CI
+name: CI-CD
 on:
   push:
     branches: [ main ]
+    tags: [ "*" ]
     paths-ignore:
       - ".gitignore"
       - ".bettercodehub.yml"
@@ -26,22 +28,33 @@ on:
 jobs:
   build:
     strategy:
-      fail-fast: false
+      fail-fast: true
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/[email protected]
-      - name: Setup Scala
-        uses: olafurpg/setup-scala@v10
-      - name: Coursier cache
-        uses: coursier/cache-action@v5
+        uses: actions/checkout@v2
+
+      - name: Cache SBT
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.ivy2/cache
+            ~/.sbt
+          key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
+
+      - uses: actions/setup-java@v3
+        with:
+          distribution: 'adopt'
+          java-version: '8'
+          cache: 'sbt'
+
       - name: clean
         run: make deep_clean
+
       - name: build
         run: |
           ./scripts/spark-cross-compile.sh
       - name: test
         run: |
           ./scripts/spark-cross-fmt.sh
@@ -50,12 +63,63 @@
       - name: unit-test-coverage
         run: make test_coverage
         env:
-          COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
+
+      - name: clean-up
+        run: |
+          rm -rf "$HOME/.ivy2/local" || true
+          find $HOME/Library/Caches/Coursier/v1 -name "ivydata-*.properties" -delete || true
+          find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
+          find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
+          find $HOME/.sbt -name "*.lock" -delete || true
+          find $HOME/.sbt
+
+  release:
+    needs: build
+    strategy:
+      fail-fast: true
+    runs-on: ubuntu-latest
+    env:
+      PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
+      PGP_SECRET: ${{ secrets.PGP_SECRET }}
+      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
+      SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Cache SBT
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.ivy2/cache
+            ~/.sbt
+          key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
+
+      - uses: actions/setup-java@v3
+        with:
+          distribution: 'adopt'
+          java-version: '8'
+          cache: 'sbt'
+
+      - name: release-staging
+        run: |
+          ./scripts/spark-cross-publish.sh library true
+          ./scripts/spark-cross-publish.sh testHelper true
+          ./scripts/spark-cross-publish.sh libraryConfigsScallop true
+        if: startsWith(github.ref, 'refs/tags/') != true && github.ref == 'refs/heads/main'
+
+      - name: release-production
+        run: |
+          ./scripts/spark-cross-publish.sh library
+          ./scripts/spark-cross-publish.sh testHelper
+          ./scripts/spark-cross-publish.sh libraryConfigsScallop
+        if: startsWith(github.ref, 'refs/tags/v')
+
+      - name: clean-up
+        run: |
+          rm -rf "$HOME/.ivy2/local" || true
+          find $HOME/Library/Caches/Coursier/v1 -name "ivydata-*.properties" -delete || true
+          find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
+          find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
+          find $HOME/.sbt -name "*.lock" -delete || true
+          find $HOME/.sbt
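
The two "if:" guards above split publishing: a push to main that is not a tag publishes to staging, while a "v*" tag publishes to production. A minimal Scala sketch of that gating, modeled on GitHub's github.ref strings (sketch only, for clarity):

    object ReleaseGating {
      // "refs/heads/main" for a branch push, "refs/tags/v1.2.3" for a tag push.
      def isStagingRelease(ref: String): Boolean =
        !ref.startsWith("refs/tags/") && ref == "refs/heads/main"

      def isProductionRelease(ref: String): Boolean =
        ref.startsWith("refs/tags/v")
    }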
47 changes: 0 additions & 47 deletions .github/workflows/release.yml

This file was deleted.

@@ -18,7 +18,7 @@ object SparkModule {
 }
 
 trait Factory[C <: CommandLineArguments.Service[C]] {
-  lazy private val sparkBuilder:         SparkSession.Builder = SparkSession.builder
+  lazy private val sparkBuilder:         SparkSession.Builder = SparkSession.builder()
   lazy private val sparkBuilderWithName: SparkSession.Builder = sparkBuilder.appName(appName)
 
   protected def appName: String
8 changes: 4 additions & 4 deletions Library/src/main/scala/com/leobenkel/zparkio/implicits.scala
@@ -30,13 +30,13 @@ object implicits {
 
     def apply[A](f: SparkSession => Dataset[A]): ZDS[A] = ZDS.map(f)
 
-    def make[A <: Product : TypeTag : ClassTag, B <: Product : TypeTag : ClassTag](
+    def make[A <: Product, B <: Product : TypeTag](
       input: Dataset[A]
     )(
       f: Dataset[A] => Encoder[B] => Dataset[B]
     ): ZDS[B] = ZDS(spark => f(input)(spark.implicits.newProductEncoder[B]))
 
-    def apply[A <: Product : TypeTag : ClassTag](data: A*): ZDS[A] =
+    def apply[A <: Product : TypeTag](data: A*): ZDS[A] =
       apply { spark =>
         import spark.implicits._
         data.toDS()
@@ -53,10 +53,10 @@
   }
 
   implicit class DatasetZ[R, A](zds: => ZIO[R, Throwable, Dataset[A]]) extends Serializable {
-    def mapDS[B <: Product : TypeTag : ClassTag](f: A => B): ZDS_R[R, B] =
+    def mapDS[B <: Product : TypeTag](f: A => B): ZDS_R[R, B] =
       SparkModule().flatMap(spark => zds.map(_.map(f)(spark.implicits.newProductEncoder[B])))
 
-    def zMap[B <: Product : TypeTag : ClassTag](
+    def zMap[B <: Product : TypeTag](
       f: A => ZIO[Any, Throwable, B]
     ): ZDS_R[R, B] =
       ZDS.flatMapR[R, B] { spark =>
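
With the ClassTag bounds dropped, spark.implicits.newProductEncoder only needs a TypeTag for the product type. A hedged usage sketch of the resulting API, assuming ZDS[A] is the SparkModule-environment alias seen in the diff (the Person and NameOnly case classes are illustrative, not part of the library):

    import com.leobenkel.zparkio.implicits._

    // Illustrative case classes; any Product with a TypeTag now works.
    final case class Person(name: String, age: Int)
    final case class NameOnly(name: String)

    // Build a ZIO-wrapped Dataset from in-memory rows via ZDS.apply above.
    val people: ZDS[Person] = ZDS(Person("Ada", 36), Person("Grace", 45))

    // mapDS keeps the transformation inside ZIO while mapping the Dataset.
    val names: ZDS[NameOnly] = people.mapDS(p => NameOnly(p.name))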
94 changes: 75 additions & 19 deletions build.sbt
@@ -2,12 +2,15 @@ val projectName = IO.readLines(new File("PROJECT_NAME")).head
 val v = IO.readLines(new File("VERSION")).head
 val sparkVersions: List[String] = IO.readLines(new File("sparkVersions")).map(_.trim)
 
-val scala11 = "2.11.12"
-val scala12 = "2.12.16"
+val Scala11 = "2.11.12"
+val Scala12 = "2.12.17"
+val Scala13 = "2.13.10"
 
-val Spark233 = "2.3.3"
-val Spark245 = "2.4.5"
-val Spark312 = "3.1.2"
+val Spark23 = "2.3.4"
+val Spark24 = "2.4.8"
+val Spark31 = "3.1.3"
+val Spark32 = "3.2.3"
+val Spark33 = "3.3.1"
 
 val sparkVersionSystem = System.getProperty("sparkVersion", sparkVersions.head)
 val sparkVersion       = settingKey[String]("Spark version")
@@ -29,13 +32,17 @@ lazy val rootSettings =
     sparkVersion := sparkVersionSystem,
     crossScalaVersions := {
       sparkVersion.value match {
-        case Spark233 => Seq(scala11)
-        case Spark245 => Seq(scala11, scala12)
-        case Spark312 => Seq(scala12)
+        case Spark23 => Seq(Scala11)
+        case Spark24 => Seq(Scala12, Scala11)
+        case Spark31 => Seq(Scala12)
+        case Spark32 => Seq(Scala13, Scala12)
+        case Spark33 => Seq(Scala13, Scala12)
         case s =>
           throw new Exception(s"crossScalaVersions: Do not know what to do with spark version $s")
       }
     },
     scalaVersion := crossScalaVersions.value.head,
-    resolvers += Resolver.sonatypeRepo("releases"),
+    resolvers ++= Resolver.sonatypeOssRepos("releases"),
     soteriaAddSemantic := false,
     version ~= (v => s"${sparkVersionSystem}_$v"),
     dynver ~= (v => s"${sparkVersionSystem}_$v")
@@ -49,16 +56,57 @@ lazy val commonSettings =
     libraryDependencies ++=
       Seq(
         // https://zio.dev/docs/getting_started.html
-        "dev.zio"          %% "zio"        % zioVersion,
-        // https://mvnrepository.com/artifact/org.apache.spark/spark-core
-        "org.apache.spark" %% "spark-core" % sparkVersion.value % Provided,
-        // https://mvnrepository.com/artifact/org.apache.spark/spark-sql
-        "org.apache.spark" %% "spark-sql"  % sparkVersion.value % Provided,
-        "org.scalatest"    %% "scalatest"  % "3.2.14"           % Test
+        "dev.zio" %% "zio" % zioVersion,
+
+        // SPARK
+        "org.apache.spark" %% "spark-core"      % sparkVersion.value,
+        "org.apache.spark" %% "spark-streaming" % sparkVersion.value,
+        "org.apache.spark" %% "spark-sql"       % sparkVersion.value,
+        "org.apache.spark" %% "spark-hive"      % sparkVersion.value,
+        "org.apache.spark" %% "spark-catalyst"  % sparkVersion.value,
+        "org.apache.spark" %% "spark-yarn"      % sparkVersion.value,
+        "org.apache.spark" %% "spark-mllib"     % sparkVersion.value,
+
+        // TEST
+        "org.scalatest" %% "scalatest" % "3.2.14" % Test
       ),
+    libraryDependencies ++= {
+      sparkVersion.value match {
+        case Spark23 | Spark24 =>
+          Seq("org.apache.xbean" % "xbean-asm6-shaded" % "4.10")
+        case Spark31 | Spark32 | Spark33 =>
+          Seq(
+            "io.netty" % "netty-all"              % "4.1.77.Final",
+            "io.netty" % "netty-buffer"           % "4.1.77.Final",
+            "io.netty" % "netty-tcnative-classes" % "2.0.52.Final"
+          )
+        case _ => Seq.empty
+      }
+    },
+    updateOptions := updateOptions.value.withGigahorse(false),
     Test / publishArtifact := false,
-    pomIncludeRepository := (_ => false)
+    pomIncludeRepository := (_ => false),
+    scalacOptions ++= {
+      scalaVersion.value match {
+        case Scala11 | Scala12 =>
+          Seq(
+            "-Ywarn-inaccessible",
+            "-Ywarn-unused-import"
+          )
+        case Scala13 => Seq.empty
+        case s       => throw new Exception(s"scalacOptions: Unknown mapping for scala version $s")
+      }
+    }
+    // Can be needed in the future
+    // Compile / unmanagedSourceDirectories ++= {
+    //   val pathWith: String => File = (p: String) => baseDirectory.value / "src" / "main" / p
+    //   scalaVersion.value match {
+    //     case Scala13           => Seq(pathWith("scala2.13"))
+    //     case Scala11 | Scala12 => Seq(pathWith("scala2"))
+    //     case s => throw new Exception(
+    //         s"unmanagedSourceDirectories: Unknown mapping for scala version $s"
+    //       )
+    //   }
+    // }
   )
 
 lazy val root = (project in file("."))
@@ -73,11 +121,15 @@ lazy val library = (project in file("Library")).settings(
   name := projectName
 )
 
-lazy val sparkTestingBaseVersion =
+lazy val sparkTestingBaseVersion: String =
   sparkVersionSystem match {
     // https://mvnrepository.com/artifact/com.holdenkarau/spark-testing-base
-    case Spark312 => "3.1.2_1.1.0"
-    case _        => s"${sparkVersionSystem}_0.14.0"
+    case Spark23 => "2.3.3_0.14.0"
+    case Spark24 => "2.4.8_1.3.0"
+    case Spark31 => "3.1.2_1.3.0"
+    case Spark32 => "3.2.2_1.3.0"
+    case Spark33 => "3.3.1_1.3.0"
+    case s => throw new Exception(s"sparkTestingBaseVersion: Unknown mapping for spark version $s")
   }
 
 lazy val testHelper = (project in file("testModules/TestHelper"))
@@ -142,3 +194,7 @@ lazy val example2Small = (project in file("examples/Example2_small"))
     libraryConfigsScallop,
     testHelper % Test
   )

+// https://github.com/sbt/sbt/issues/6997#issuecomment-1310637232
+ThisBuild / libraryDependencySchemes +=
+  "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always
@@ -30,7 +30,8 @@ object CommandLineArgumentScallop {
     val (active, inactive) =
       filteredSummary(Set.empty).split('\n').partition(_.trim.startsWith("*"))
 
-    (active.sorted :+ "") ++ inactive.sorted
+    val a: Seq[String] = active.sorted.toSeq :+ ""
+    a ++ inactive.sorted
   }
 
   final val env: ScallopOption[Environment] =
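
The .toSeq is what keeps this compiling on Scala 2.13, where Array#sorted returns an Array and an Array result no longer conforms to Seq implicitly. A standalone sketch of the help-sorting logic (the sample strings are invented):

    // Active (starred) options first, alphabetized, then a blank separator,
    // then the remaining lines, also sorted. Runs on Scala 2.12 and 2.13.
    val summary = "* --env\nplain line\n* --help\nanother line"
    val (active, inactive) =
      summary.split('\n').partition(_.trim.startsWith("*"))
    val ordered: Seq[String] = (active.sorted.toSeq :+ "") ++ inactive.sorted
    // ordered == Seq("* --env", "* --help", "", "another line", "plain line")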
@@ -114,7 +114,7 @@ class CommandLineArgumentScallopTest extends AnyFreeSpec {
       .run(
         arg.checkValidity().tapError { case h: HelpHandlerException => h.printHelpMessage }
       ) match {
-      case Success(a)  => assert(true)
+      case Success(_)  => assert(true)
       case Failure(ex) => assertThrows[HelpHandlerException](throw ex.squash)
     }
   }
@@ -18,7 +18,7 @@ object FileIO {
   private trait LiveService extends FileIO.Service {
     override protected def readFileContent(path: String): Seq[String] = {
       val file    = Source.fromFile(path)
-      val content = file.getLines().toArray
+      val content = file.getLines().toSeq
       file.close()
       content
     }
@@ -2,6 +2,7 @@ package com.leobenkel.example1
 
 import com.leobenkel.zparkio.ZparkioApp.ZIOEnv
 import com.leobenkel.zparkiotest.TestWithSpark
+import org.scalatest.Assertions
 import org.scalatest.freespec.AnyFreeSpec
 import zio.{Runtime, Unsafe, ZIO}
 import zio.Exit.{Failure, Success}
@@ -19,7 +20,7 @@ class ApplicationTest extends AnyFreeSpec with TestWithSpark {
       case Success(value) =>
         println(s"Read: $value")
         assertResult(0)(value)
-      case Failure(cause) => fail(cause.prettyPrint)
+      case Failure(cause) => Assertions.fail(cause.prettyPrint)
     }
   }
 
@@ -34,7 +35,7 @@ class ApplicationTest extends AnyFreeSpec with TestWithSpark {
       case Success(value) =>
         println(s"Read: $value")
         assertResult(1)(value)
-      case Failure(cause) => fail(cause.prettyPrint)
+      case Failure(cause) => Assertions.fail(cause.prettyPrint)
     }
   }
 
@@ -44,7 +45,7 @@ class ApplicationTest extends AnyFreeSpec with TestWithSpark {
       case Success(value) =>
         println(s"Read: $value")
         assertResult(0)(value)
-      case Failure(cause) => fail(cause.prettyPrint)
+      case Failure(cause) => Assertions.fail(cause.prettyPrint)
     }
   }
 }
