Skip to content

Commit

Permalink
typelevel#787 - fix test to run on dbr 14.3
Browse files Browse the repository at this point in the history
  • Loading branch information
chris-twiner committed Mar 12, 2024
1 parent 1008b85 commit dd10cee
Showing 1 changed file with 83 additions and 44 deletions.
127 changes: 83 additions & 44 deletions dataset/src/test/scala/frameless/CreateTests.scala
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package frameless

import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.{ Arbitrary, Prop }
import org.scalacheck.Prop._

import scala.reflect.ClassTag
Expand All @@ -13,29 +13,40 @@ class CreateTests extends TypedDatasetSuite with Matchers {

test("creation using X4 derived DataFrames") {
def prop[
A: TypedEncoder,
B: TypedEncoder,
C: TypedEncoder,
D: TypedEncoder](data: Vector[X4[A, B, C, D]]): Prop = {
A: TypedEncoder,
B: TypedEncoder,
C: TypedEncoder,
D: TypedEncoder
](data: Vector[X4[A, B, C, D]]
): Prop = {
val ds = TypedDataset.create(data)
TypedDataset.createUnsafe[X4[A, B, C, D]](ds.toDF()).collect().run() ?= data
TypedDataset
.createUnsafe[X4[A, B, C, D]](ds.toDF())
.collect()
.run() ?= data
}

check(forAll(prop[Int, Char, X2[Option[Country], Country], Int] _))
check(forAll(prop[X2[Int, Int], Int, Boolean, Vector[Food]] _))
check(forAll(prop[String, Food, X3[Food, Country, Boolean], Int] _))
check(forAll(prop[String, Food, X3U[Food, Country, Boolean], Int] _))
check(forAll(prop[
Option[Vector[Food]],
Vector[Vector[X2[Vector[(Person, X1[Char])], Country]]],
X3[Food, Country, String],
Vector[(Food, Country)]] _))
check(
forAll(
prop[Option[Vector[Food]], Vector[
Vector[X2[Vector[(Person, X1[Char])], Country]]
], X3[Food, Country, String], Vector[(Food, Country)]] _
)
)
}

test("array fields") {
def prop[T: Arbitrary: TypedEncoder: ClassTag] = forAll {
(d1: Array[T], d2: Array[Option[T]], d3: Array[X1[T]], d4: Array[X1[Option[T]]],
d5: X1[Array[T]]) =>
(d1: Array[T],
d2: Array[Option[T]],
d3: Array[X1[T]],
d4: Array[X1[Option[T]]],
d5: X1[Array[T]]
) =>
TypedDataset.create(Seq(d1)).collect().run().head.sameElements(d1) &&
TypedDataset.create(Seq(d2)).collect().run().head.sameElements(d2) &&
TypedDataset.create(Seq(d3)).collect().run().head.sameElements(d3) &&
Expand All @@ -55,13 +66,17 @@ class CreateTests extends TypedDatasetSuite with Matchers {

test("vector fields") {
def prop[T: Arbitrary: TypedEncoder] = forAll {
(d1: Vector[T], d2: Vector[Option[T]], d3: Vector[X1[T]], d4: Vector[X1[Option[T]]],
d5: X1[Vector[T]]) =>
(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(TypedDataset.create(Seq(d2)).collect().run().head ?= d2) &&
(TypedDataset.create(Seq(d3)).collect().run().head ?= d3) &&
(TypedDataset.create(Seq(d4)).collect().run().head ?= d4) &&
(TypedDataset.create(Seq(d5)).collect().run().head ?= d5)
(d1: Vector[T],
d2: Vector[Option[T]],
d3: Vector[X1[T]],
d4: Vector[X1[Option[T]]],
d5: X1[Vector[T]]
) =>
(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(TypedDataset.create(Seq(d2)).collect().run().head ?= d2) &&
(TypedDataset.create(Seq(d3)).collect().run().head ?= d3) &&
(TypedDataset.create(Seq(d4)).collect().run().head ?= d4) &&
(TypedDataset.create(Seq(d5)).collect().run().head ?= d5)
}

check(prop[Boolean])
Expand All @@ -77,9 +92,13 @@ class CreateTests extends TypedDatasetSuite with Matchers {

test("list fields") {
def prop[T: Arbitrary: TypedEncoder] = forAll {
(d1: List[T], d2: List[Option[T]], d3: List[X1[T]], d4: List[X1[Option[T]]],
d5: X1[List[T]]) =>
(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(d1: List[T],
d2: List[Option[T]],
d3: List[X1[T]],
d4: List[X1[Option[T]]],
d5: X1[List[T]]
) =>
(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(TypedDataset.create(Seq(d2)).collect().run().head ?= d2) &&
(TypedDataset.create(Seq(d3)).collect().run().head ?= d3) &&
(TypedDataset.create(Seq(d4)).collect().run().head ?= d4) &&
Expand All @@ -98,16 +117,23 @@ class CreateTests extends TypedDatasetSuite with Matchers {
}

test("Map fields (scala.Predef.Map / scala.collection.immutable.Map)") {
def prop[A: Arbitrary: NotCatalystNullable: TypedEncoder, B: Arbitrary: NotCatalystNullable: TypedEncoder] = forAll {
(d1: Map[A, B], d2: Map[B, A], d3: Map[A, Option[B]],
d4: Map[A, X1[B]], d5: Map[X1[A], B], d6: Map[X1[A], X1[B]]) =>

(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(TypedDataset.create(Seq(d2)).collect().run().head ?= d2) &&
(TypedDataset.create(Seq(d3)).collect().run().head ?= d3) &&
(TypedDataset.create(Seq(d4)).collect().run().head ?= d4) &&
(TypedDataset.create(Seq(d5)).collect().run().head ?= d5) &&
(TypedDataset.create(Seq(d6)).collect().run().head ?= d6)
def prop[
A: Arbitrary: NotCatalystNullable: TypedEncoder,
B: Arbitrary: NotCatalystNullable: TypedEncoder
] = forAll {
(d1: Map[A, B],
d2: Map[B, A],
d3: Map[A, Option[B]],
d4: Map[A, X1[B]],
d5: Map[X1[A], B],
d6: Map[X1[A], X1[B]]
) =>
(TypedDataset.create(Seq(d1)).collect().run().head ?= d1) &&
(TypedDataset.create(Seq(d2)).collect().run().head ?= d2) &&
(TypedDataset.create(Seq(d3)).collect().run().head ?= d3) &&
(TypedDataset.create(Seq(d4)).collect().run().head ?= d4) &&
(TypedDataset.create(Seq(d5)).collect().run().head ?= d5) &&
(TypedDataset.create(Seq(d6)).collect().run().head ?= d6)
}

check(prop[String, String])
Expand All @@ -123,30 +149,43 @@ class CreateTests extends TypedDatasetSuite with Matchers {

test("maps with Option keys should not resolve the TypedEncoder") {
val data: Seq[Map[Option[Int], Int]] = Seq(Map(Some(5) -> 5))
illTyped("TypedDataset.create(data)", ".*could not find implicit value for parameter encoder.*")
illTyped(
"TypedDataset.create(data)",
".*could not find implicit value for parameter encoder.*"
)
}

test("not aligned columns should throw an exception") {
val v = Vector(X2(1,2))
val v = Vector(X2(1, 2))
val df = TypedDataset.create(v).dataset.toDF()

a [IllegalStateException] should be thrownBy {
a[IllegalStateException] should be thrownBy {
TypedDataset.createUnsafe[X1[Int]](df).show().run()
}
}

test("dataset with different column order") {
// e.g. when loading data from partitioned dataset
// the partition columns get appended to the end of the underlying relation
def prop[A: Arbitrary: TypedEncoder, B: Arbitrary: TypedEncoder] = forAll {
(a1: A, b1: B) => {
val ds = TypedDataset.create(
Vector((b1, a1))
).dataset.toDF("b", "a").as[X2[A, B]](TypedExpressionEncoder[X2[A, B]])
TypedDataset.create(ds).collect().run().head ?= X2(a1, b1)

def prop[A: Arbitrary: TypedEncoder, B: Arbitrary: TypedEncoder] =
forAll { (a1: A, b1: B) =>
{
// this code separates the different column ordering from the `.as` conversion, and requires createUnsafe to actually perform that conversion.
// using create directly would wrongly assume that no type checking takes place against the (intentionally mismatched) encoders — DBR 14.3 does perform this check.
val df = TypedDataset
.create(
Vector((b1, a1))
)
.dataset
.toDF("b", "a")
TypedDataset
.createUnsafe(df)(TypedEncoder[X2[A, B]])
.collect()
.run()
.head ?= X2(a1, b1)

}
}
}
check(prop[X1[Double], X1[X1[SQLDate]]])
check(prop[String, Int])
}
Expand Down

0 comments on commit dd10cee

Please sign in to comment.