Cats imports fixed
sbt build prepared for Spark 2.4
alfonsorr committed Apr 11, 2022
1 parent 5159ac7 commit e03a21e
Showing 14 changed files with 29 additions and 20 deletions.
13 changes: 11 additions & 2 deletions build.sbt
@@ -3,17 +3,20 @@ import sbt.{Compile, Def}
val stableVersion = "0.0.3"

val sparkDefaultShortVersion = "3.2"
val spark24Version = "2.4.8"
val spark30Version = "3.0.3"
val spark31Version = "3.1.3"
val spark32Version = "3.2.1"

val versionRegex = """^(.*)\.(.*)\.(.*)$""".r
val versionRegexShort = """^(.*)\.(.*)$""".r

val scala211 = "2.11.12"
val scala212 = "2.12.15"
val scala213 = "2.13.8"

val parserSparkVersion: String => String = {
case versionRegexShort("2", "4") => spark24Version
case versionRegexShort("3", "0") => spark30Version
case versionRegexShort("3", "1") => spark31Version
case versionRegexShort("3", "2") => spark32Version
@@ -25,11 +28,17 @@ val sparkLong2ShortVersion: String => String = { case versionRegex(a, b, _) =>
}

val scalaVersionSelect: String => String = {
case versionRegex("2", _, _) => scala211
case versionRegex("3", "0", _) => scala212
case versionRegex("3", "1", _) => scala212
case versionRegex("3", "2", _) => scala212
}

val catsVersion: String => String = {
case versionRegex("2", _, _) => "2.0.0"
case _ => "2.7.0"
}

ThisBuild / organization := "org.hablapps"
ThisBuild / homepage := Some(url("https://github.com/hablapps/doric"))
ThisBuild / licenses := List(
@@ -98,8 +107,8 @@ lazy val core = project
scalaVersion := scalaVersionSelect(sparkVersion.value),
libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided", // scala-steward:off
"org.typelevel" %% "cats-core" % "2.7.0",
"com.lihaoyi" %% "sourcecode" % "0.2.8",
"org.typelevel" %% "cats-core" % catsVersion(sparkVersion.value),
"com.lihaoyi" %% "sourcecode" % "0.2.8",
"com.github.mrpowers" %% "spark-fast-tests" % "1.2.0" % "test",
"org.scalatest" %% "scalatest" % "3.2.11" % "test"
),
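
For reference, a minimal sketch (plain Scala, outside sbt) of how the selectors added above resolve a concrete Spark version. The regexes and cases mirror the build.sbt definitions; the object and main wrapper exist only for this illustration.

object VersionResolutionSketch {

  val versionRegex = """^(.*)\.(.*)\.(.*)$""".r

  // Same mapping as scalaVersionSelect above: Spark 2.x builds use Scala 2.11,
  // Spark 3.x builds use Scala 2.12.
  val scalaVersionSelect: String => String = {
    case versionRegex("2", _, _)   => "2.11.12"
    case versionRegex("3", "0", _) => "2.12.15"
    case versionRegex("3", "1", _) => "2.12.15"
    case versionRegex("3", "2", _) => "2.12.15"
  }

  // Same mapping as catsVersion above: the Spark 2.x build pins the older cats line.
  val catsVersion: String => String = {
    case versionRegex("2", _, _) => "2.0.0"
    case _                       => "2.7.0"
  }

  def main(args: Array[String]): Unit = {
    println(scalaVersionSelect("2.4.8")) // 2.11.12
    println(catsVersion("2.4.8"))        // 2.0.0
    println(scalaVersionSelect("3.2.1")) // 2.12.15
    println(catsVersion("3.2.1"))        // 2.7.0
  }
}
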
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/DoricColumn.scala
@@ -1,7 +1,7 @@
package doric

import cats.data.{Kleisli, NonEmptyChain, Validated}
import cats.implicits.{catsSyntaxApplicativeId, catsSyntaxTuple2Semigroupal, catsSyntaxValidatedId, catsSyntaxValidatedIdBinCompat0}
import cats.implicits._
import doric.sem.{ColumnTypeError, DoricSingleError, Location, SparkErrorWrapper}
import doric.syntax.ColGetters
import doric.types.{LiteralSparkType, SparkType}
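
This and the files below apply the same change: each enumerated cats.implicits.catsSyntax... import is widened to cats.implicits._, presumably so that the same sources compile against both cats 2.0.0 (the Spark 2.4 / Scala 2.11 build) and cats 2.7.0 without depending on syntax object names that may differ between the two releases. A minimal standalone sketch of the kind of syntax the wildcard import provides; the object, values, and printed result are illustrative only:

import cats.data.ValidatedNec
import cats.implicits._

object CatsWildcardImportSketch {
  def main(args: Array[String]): Unit = {
    // validNec comes from the validated syntax and mapN from the tuple
    // (semigroupal) syntax; both are covered by the single wildcard import.
    val a: ValidatedNec[String, Int] = 1.validNec
    val b: ValidatedNec[String, Int] = 2.validNec
    println((a, b).mapN(_ + _)) // Valid(3)
  }
}
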
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/DoricJoinColumn.scala
@@ -1,6 +1,6 @@
package doric

import cats.implicits.catsSyntaxTuple2Semigroupal
import cats.implicits._
import doric.syntax.ColGetters

import org.apache.spark.sql.Column
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/JoinSideDoricColumn.scala
@@ -1,7 +1,7 @@
package doric

import cats.data.Kleisli
import cats.implicits.catsSyntaxTuple2Semigroupal
import cats.implicits._

import org.apache.spark.sql.{Column, Dataset}

2 changes: 1 addition & 1 deletion core/src/main/scala/doric/sem/TransformOps.scala
@@ -1,7 +1,7 @@
package doric
package sem

import cats.implicits.toTraverseOps
import cats.implicits._

import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.doric.DataFrameExtras
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/AggregationColumns.scala
@@ -1,7 +1,7 @@
package doric
package syntax

import cats.implicits.{catsSyntaxTuple2Semigroupal, toTraverseOps}
import cats.implicits._
import doric.types.NumericType

import org.apache.spark.sql.{Column, functions => f}
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/BinaryColumns.scala
@@ -1,7 +1,7 @@
package doric
package syntax

import cats.implicits.toTraverseOps
import cats.implicits._
import doric.types.{BinaryType, SparkType}

import org.apache.spark.sql.{functions => f}
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/StringColumns.scala
@@ -1,7 +1,7 @@
package doric
package syntax

import cats.implicits.{catsSyntaxTuple2Semigroupal, catsSyntaxTuple3Semigroupal, catsSyntaxTuple4Semigroupal, toTraverseOps}
import cats.implicits._

import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions._
8 changes: 3 additions & 5 deletions core/src/main/scala/doric/syntax/TimestampColumns.scala
@@ -1,14 +1,12 @@
package doric
package syntax

import cats.implicits.catsSyntaxTuple2Semigroupal
import cats.implicits._
import doric.types.TimestampType
import org.apache.spark.sql.catalyst.expressions.{FromUTCTimestamp, ToUTCTimestamp}
import org.apache.spark.sql.{Column, functions => f}

import java.sql.Timestamp

import org.apache.spark.sql.{functions => f}
import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions.{FromUTCTimestamp, ToUTCTimestamp}

private[syntax] trait TimestampColumns {

2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/TypeMatcher.scala
@@ -2,7 +2,7 @@ package doric
package syntax

import cats.data.Kleisli
import cats.implicits.catsSyntaxValidatedIdBinCompat0
import cats.implicits._
import doric.sem.{ColumnMultyTypeError, Location}
import doric.types.SparkType

6 changes: 4 additions & 2 deletions core/src/main/scala/doric/syntax/WhenBuilder.scala
@@ -1,9 +1,10 @@
package doric
package syntax

import cats.implicits.{catsSyntaxApplicativeId, catsSyntaxTuple2Semigroupal, catsSyntaxTuple3Semigroupal}
import cats.implicits._
import doric.types.{Casting, SparkType}
import org.apache.spark.sql.functions.{lit => sparkLit, when => sparkWhen}

import org.apache.spark.sql.functions.{when => sparkWhen}
import org.apache.spark.sql.Column

final private[doric] case class WhenBuilder[T](
@@ -13,6 +14,7 @@ final private[doric] case class WhenBuilder[T](

/**
* Marks the rest of cases as null values of the provided type
*
* @param dt
* Type class for spark data type
* @return
@@ -1,6 +1,6 @@
package org.apache.spark.sql.doric

import cats.implicits.toTraverseOps
import cats.implicits._
import doric.{DoricColumn, DoricValidated}

import org.apache.spark.sql.{DataFrame, RelationalGroupedDataset}
@@ -1,7 +1,7 @@
package doric
package syntax

import cats.implicits.catsSyntaxTuple2Semigroupal
import cats.implicits._
import doric.types.{BinaryType, SparkType}

import org.apache.spark.sql.catalyst.expressions.Decode
@@ -1,7 +1,7 @@
package doric
package syntax

import cats.implicits.catsSyntaxTuple2Semigroupal
import cats.implicits._
import doric.types.{BinaryType, SparkType}

import org.apache.spark.sql.Column