diff --git a/build.sbt b/build.sbt index 682dc5e..0924b2a 100644 --- a/build.sbt +++ b/build.sbt @@ -1,11 +1,11 @@ -val scala3Version = "3.4.0-RC4" -val mainVersion = "0.2.5" +val scala3Version = "3.4.1-RC1" +val mainVersion = "0.2.5" Global / onChangedBuildSource := ReloadOnSourceChanges -ThisBuild / organization := "ru.primetalk" -ThisBuild / version := mainVersion -ThisBuild / scalaVersion := scala3Version +ThisBuild / organization := "ru.primetalk" +ThisBuild / version := mainVersion +ThisBuild / scalaVersion := scala3Version ThisBuild / versionScheme := Some("early-semver") // scalacOptions ++= Seq( @@ -27,20 +27,21 @@ val catsEffect = "org.typelevel" %% "cats-effect" % "3.5.0" val catsCore = "org.typelevel" %% "cats-core" % "2.9.0" val fs2 = libraryDependencies ++= Seq( "co.fs2" %% "fs2-core" % "3.7.0", - "co.fs2" %% "fs2-io" % "3.7.0", + "co.fs2" %% "fs2-io" % "3.7.0" ) val commonSettings = Seq( scalaVersion := scala3Version, scalacOptions ++= Seq( - //"-Xmax-inlines=50", - "-deprecation", + // "-Xmax-inlines=50", + "-deprecation" ), libraryDependencies ++= Seq( catsCore, - "com.novocode" % "junit-interface" % "0.11" % Test, - "org.scalacheck" %% "scalacheck" % "1.17.0" % Test, - "org.scalatest" %% "scalatest" % "3.2.15" % Test, + //"io.github.kitlangton" %% "quotidian" % "0.0.9", + "com.novocode" % "junit-interface" % "0.11" % Test, + "org.scalacheck" %% "scalacheck" % "1.17.0" % Test, + "org.scalatest" %% "scalatest" % "3.2.15" % Test ) ) @@ -49,11 +50,11 @@ lazy val root = (project in file(".")) typedOntologyMetaMeta, typedOntologyMeta, typedOntologySimpleMeta, - ontologyExample1, + ontologyExample1 ) .settings( - name := "typed-ontology", - publish / skip := true, + name := "typed-ontology", + publish / skip := true ) //lazy val typeSet = project // .in(file("type-set")) @@ -66,34 +67,85 @@ lazy val typedOntologyMetaMeta = project .in(file("typed-ontology-metameta")) .settings( name := "typed-ontology-metameta", - fs2, + fs2 ) - .settings(commonSettings 
*) + .settings(commonSettings*) lazy val typedOntologyMeta = project .in(file("typed-ontology-meta")) .settings( - name := "typed-ontology-meta", + name := "typed-ontology-meta" ) .dependsOn(typedOntologyMetaMeta) - .settings(commonSettings :_*) + .settings(commonSettings: _*) lazy val typedOntologySimpleMeta = project .in(file("typed-ontology-simple-meta")) .settings( - name := "typed-ontology-simple-meta", + name := "typed-ontology-simple-meta" ) .dependsOn(typedOntologyMetaMeta) - .settings(commonSettings *) + .settings(commonSettings*) lazy val ontologyExample1 = project .in(file("ontology-example1")) .settings( - name := "ontology-example1", - publish / skip := true, + name := "ontology-example1", + publish / skip := true + ) + .dependsOn( + typedOntologySimpleMeta, + typedOntologyMetaMeta + ) + .settings(commonSettings*) + +val quillVersion = "4.8.1" + +lazy val ontologyQuillParser = project + .in(file("ontology-quill-parser")) + .settings( + name := "ontology-quill-parser", + publish / skip := true + ) + .dependsOn( + typedOntologySimpleMeta, + typedOntologyMetaMeta + ) + .settings(commonSettings*) + .settings( + libraryDependencies ++= Seq( + // postgres driver + "org.postgresql" % "postgresql" % "42.7.0", + // Syncronous JDBC Modules + "io.getquill" %% "quill-jdbc" % quillVersion, + ) + ) + +lazy val ontologyQuill = project + .in(file("ontology-quill")) + .settings( + name := "ontology-quill", + publish / skip := true ) .dependsOn( typedOntologySimpleMeta, typedOntologyMetaMeta, + ontologyQuillParser + ) + .settings(commonSettings*) + .settings( + libraryDependencies ++= Seq( + // postgres driver + "org.postgresql" % "postgresql" % "42.7.0", + // Syncronous JDBC Modules + "io.getquill" %% "quill-jdbc" % quillVersion, + // // Or ZIO Modules + // "io.getquill" %% "quill-jdbc-zio" % quillVersion, + // // Or Cassandra + // "io.getquill" %% "quill-cassandra" % quillVersion, + // // Or Cassandra + ZIO + // "io.getquill" %% "quill-cassandra-zio" % quillVersion, + 
// // Add for Caliban Integration + // "io.getquill" %% "quill-caliban" % quillVersion + ) ) - .settings(commonSettings *) diff --git a/ontology-example1/src/main/scala/ru/primetalk/typed/ontology/example1/orderOntology.scala b/ontology-example1/src/main/scala/ru/primetalk/typed/ontology/example1/orderOntology.scala index 6ac2746..e8b453b 100644 --- a/ontology-example1/src/main/scala/ru/primetalk/typed/ontology/example1/orderOntology.scala +++ b/ontology-example1/src/main/scala/ru/primetalk/typed/ontology/example1/orderOntology.scala @@ -3,39 +3,64 @@ package ru.primetalk.typed.ontology.example1 import ru.primetalk.typed.ontology.simple.meta._ import ru.primetalk.typed.ontology.metameta.OntologyType.Record import java.time.LocalDateTime +import SimpleTypes.{given, *} object Product extends TableBuilder: object id extends column[Int] + val id1 = id + type Id = id1.type + //type Id = id.type // Scala 3 bug object name extends column[String] + val name1 = name + type Name = name1.type object price extends column[BigInt] - - type TableSchema = id.type #: name.type #: price.type #: EmptySchema + type Price = price.type + type PriceSchema = Price #: EmptySchema + type NamePriceSchema = Name #: Price #: EmptySchema + type TableSchema = Id #: Name #: Price #: EmptySchema val tableSchema: TableSchema = fields(id, name, price) val idNameSchema = fields(id, name) - val primaryKeySchema = fields(id) + val namePriceSchema = fields(name, price) + type PrimaryKeySchema = id.type #: EmptySchema + val primaryKeySchema: PrimaryKeySchema = fields(id) val fullSchema = infer[TableSchema] + val svt = summon[SchemaValueType.Aux1[TableSchema]] + type Row = svt.Value object Order extends TableBuilder: object id extends column[Int] + val id1 = id + type Id = id1.type object date extends column[LocalDateTime] - type TableSchema = id.type #: date.type #: EmptySchema + type Date = date.type + type TableSchema = Id #: Date #: EmptySchema val tableSchema: TableSchema = fields(id, date) val ts = 
fields(id, date) type TS = ts.Type + val svt = summon[SchemaValueType.Aux1[TableSchema]] + type Row = svt.Value object OrderItem extends TableBuilder: object id extends column[Int] + val id1 = id + type Id = id1.type object orderId extends column[Int] + type OrderId = orderId.type object productId extends column[Int] + type ProductId = productId.type // val productId = Product.id// does not work well with toString - type TableSchema = id.type #: orderId.type #: productId.type #: EmptySchema - val tableSchema = infer[TableSchema] + type TableSchema = Id #: OrderId #: ProductId #: EmptySchema + + val tableSchema: TableSchema = fields(id, orderId, productId) + // val tableSchema = infer[TableSchema] // val tableSchema: TableSchema = id #: orderId #: productId #: EmptySchema - type SmallerSchema = id.type #: orderId.type #: EmptySchema + type SmallerSchema = Id #: OrderId #: EmptySchema val smallerSchema: SmallerSchema = infer[SmallerSchema] lazy val orderIdFk = orderId.foreignKey(Order.id) lazy val productIdFk = productId.foreignKey(Product.id) + val svt = summon[SchemaValueType.Aux1[TableSchema]] + type Row = svt.Value diff --git a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example1/PersonSpec.scala b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example1/PersonSpec.scala index 82a4909..e4666c9 100644 --- a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example1/PersonSpec.scala +++ b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example1/PersonSpec.scala @@ -3,25 +3,25 @@ package ru.primetalk.typed.ontology.example1 import org.junit.Test import ru.primetalk.typed.ontology.simple.meta._ import ru.primetalk.typed.ontology.metameta.OntologyType.Record +import SimpleTypes._ +// class PersonSpec: -class PersonSpec: +// abstract final class Person - abstract final class Person +// object personProps extends RecordSchemaBuilder[Person]: +// val name = property[String]("name") +// val age = property[Int]("age") 
// : SimplePropertyId[Record[Person], Int] +// val title = property[String]("title") +// val baseSchema = fields(age, name) - object personProps extends RecordSchemaBuilder[Person]: - val name = property[String]("name") - val age = property[Int]("age") // : SimplePropertyId[Record[Person], Int] - val title = property[String]("title") - val baseSchema = fields(age, name) +// @Test def schemaTest = +// println(personProps.baseSchema) +// val bs = personProps.baseSchema +// val person1: bs.svt.Value = (20, "Vasya") +// val m1 = bs.convertToMap(person1) - @Test def schemaTest = - println(personProps.baseSchema) - val bs = personProps.baseSchema - val person1: bs.Values = (20, "Vasya") - val m1 = bs.convertToMap(person1) +// assert(bs.get(personProps.name)(person1) == Some("Vasya")) +// assert(bs.get(personProps.age)(person1) == Some(20)) +// assert(bs.get(personProps.title)(person1) == None) - assert(bs.get(personProps.name)(person1) == Some("Vasya")) - assert(bs.get(personProps.age)(person1) == Some(20)) - assert(bs.get(personProps.title)(person1) == None) - - assert(m1 == Map("name" -> "Vasya", "age" -> 20)) +// assert(m1 == Map("name" -> "Vasya", "age" -> 20)) diff --git a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Order2Spec.scala b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Order2Spec.scala index 77ce365..cc39f02 100644 --- a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Order2Spec.scala +++ b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Order2Spec.scala @@ -9,9 +9,12 @@ import compiletime.ops.int._ import ru.primetalk.typed.ontology.example1._ import java.time.LocalDateTime import ru.primetalk.typed.ontology.simple.listrelation._ +import SimpleTypes.{given, *} trait TestData extends BaseSpec: - val product1: Product.Row = (1, "product1", BigInt(10)) + import Product.{given, *} + val svt = summon[SchemaValueType.Aux1[TableSchema]] + val product1: svt.Value = (1, 
"product1", BigInt(10)) val product2: Product.Row = (2, "product2", BigInt(20)) val products = RelationList(Product.tableSchema)(List(product1, product2)) val order1: Order.Row = (1, LocalDateTime.of(2022, java.time.Month.JANUARY, 23, 0, 0, 0, 0)) @@ -27,79 +30,79 @@ class Order2Spec extends TestData: println(Product.id.toString) assertResult("id: int")(Product.id.toString) } - test("Projection to smaller schema") { - val v = orderItems.projection(OrderItem.smallerSchema) - v.rows should equal(List((1, 1), (2, 1), (3, 1))) - } - test("Schema concatenation") { - val joinSchema = OrderItem.tableSchema.concat(Order.tableSchema) - OrderItem.tableSchema.toString should equal("id: int, orderId: int, productId: int") - Order.tableSchema.toString should equal("id: int, date: LocalDateTime") - joinSchema.toString should equal( - OrderItem.tableSchema.toString + ", " + Order.tableSchema.toString - ) - } - test("Join using foreign key") { - val result = fullInnerJoin(orderItems, orders, OrderItem.orderIdFk) - // val cjs = JointSchema(OrderItem.tableSchema, Order.tableSchema)(joinSchema) +// test("Projection to smaller schema") { +// val v = orderItems.projection(OrderItem.smallerSchema) +// v.rows should equal(List((1, 1), (2, 1), (3, 1))) +// } + // test("Schema concatenation") { + // val joinSchema = OrderItem.tableSchema.concat(Order.tableSchema) + // OrderItem.tableSchema.toString should equal("id: int, orderId: int, productId: int") + // Order.tableSchema.toString should equal("id: int, date: LocalDateTime") + // joinSchema.toString should equal( + // OrderItem.tableSchema.toString + ", " + Order.tableSchema.toString + // ) + // } +// test("Join using foreign key") { +// val result = fullInnerJoin(orderItems, orders, OrderItem.orderIdFk) +// // val cjs = JointSchema(OrderItem.tableSchema, Order.tableSchema)(joinSchema) - val expected = List( - orderItem1 ++ order1, - orderItem2 ++ order1, - orderItem3 ++ order1 - ) +// val expected = List( +// orderItem1 ++ order1, +// 
orderItem2 ++ order1, +// orderItem3 ++ order1 +// ) - assertResult(expected)(result) - } - test("Cross product") { - val result = crossProduct(orderItems, orders) - val expected = List( - orderItem1 ++ order1, - orderItem2 ++ order1, - orderItem3 ++ order1 - ) - result should equal(expected) - } - // test("Join using withFk"){ - // val result = orderItems.withFk(OrderItem.productIdFk).join(products) +// assertResult(expected)(result) +// } +// test("Cross product") { +// val result = crossProduct(orderItems, orders) +// val expected = List( +// orderItem1 ++ order1, +// orderItem2 ++ order1, +// orderItem3 ++ order1 +// ) +// result should equal(expected) +// } +// // test("Join using withFk"){ +// // val result = orderItems.withFk(OrderItem.productIdFk).join(products) - // val expected = List( - // orderItem1 ++ product1, - // orderItem2 ++ product1, - // orderItem3 ++ product2, - // ) +// // val expected = List( +// // orderItem1 ++ product1, +// // orderItem2 ++ product1, +// // orderItem3 ++ product2, +// // ) - // assertResult(expected)(result) - // } - test("Indices of properties") { - type T = OrderItem.tableSchema.IndicesOfProps[OrderItem.TableSchema] - val res = OrderItem.tableSchema.indicesOfProps(OrderItem.tableSchema) - assert(res == (0, 1, 2)) - val res2 = OrderItem.tableSchema.indicesOfProps(OrderItem.smallerSchema) - assert(res2 == (0, 1)) - } +// // assertResult(expected)(result) +// // } +// test("Indices of properties") { +// type T = OrderItem.tableSchema.IndicesOfProps[OrderItem.TableSchema] +// val res = OrderItem.tableSchema.indicesOfProps(OrderItem.tableSchema) +// assert(res == (0, 1, 2)) +// val res2 = OrderItem.tableSchema.indicesOfProps(OrderItem.smallerSchema) +// assert(res2 == (0, 1)) +// } - test("Projection using indices") { - type Iid = orderItems.schema.IndexOfProp[OrderItem.id.type] - type IInt = RecordSchema.IndexOfTypeInTuple[(Int, String), Int] - val iid: IInt = 0 - type IorderId = 
orderItems.schema.IndexOfProp[OrderItem.orderId.type] - type Inds = orderItems.schema.IndicesOfProps[OrderItem.smallerSchema.type] - val indicesU = orderItems.schema.indicesOfProps(OrderItem.smallerSchema) - val indices1: (Int, Int) = indicesU - val indices2: (Iid, IorderId) = indicesU - val indices: orderItems.schema.IndicesOfProps[OrderItem.smallerSchema.type] = indicesU +// test("Projection using indices") { +// type Iid = orderItems.schema.IndexOfProp[OrderItem.id.type] +// type IInt = RecordSchema.IndexOfTypeInTuple[(Int, String), Int] +// val iid: IInt = 0 +// type IorderId = orderItems.schema.IndexOfProp[OrderItem.orderId.type] +// type Inds = orderItems.schema.IndicesOfProps[OrderItem.smallerSchema.type] +// val indicesU = orderItems.schema.indicesOfProps(OrderItem.smallerSchema) +// val indices1: (Int, Int) = indicesU +// val indices2: (Iid, IorderId) = indicesU +// val indices: orderItems.schema.IndicesOfProps[OrderItem.smallerSchema.type] = indicesU - val p = Product.primaryKeySchema.projectorFrom(Product.tableSchema) - val res = products.rows.map(p) - assert(res == List(Tuple1(1), Tuple1(2)), s"res=$res") - } +// val p = Product.primaryKeySchema.projectorFrom(Product.tableSchema) +// val res = products.rows.map(p) +// assert(res == List(Tuple1(1), Tuple1(2)), s"res=$res") +// } - test("propertyGetter") { - val getId = orderItems.schema.propertyGetter(OrderItem.id) - val res = orderItems.rows.map(getId) - assert(res == List(1, 2, 3)) - val getProductId = orderItems.schema.propertyGetter(OrderItem.productId) - val pres = orderItems.rows.map(getProductId) - assert(pres == List(1, 1, 2)) - } +// test("propertyGetter") { +// val getId = orderItems.schema.propertyGetter(OrderItem.id) +// val res = orderItems.rows.map(getId) +// assert(res == List(1, 2, 3)) +// val getProductId = orderItems.schema.propertyGetter(OrderItem.productId) +// val pres = orderItems.rows.map(getProductId) +// assert(pres == List(1, 1, 2)) +// } diff --git 
a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Rel2Spec.scala b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Rel2Spec.scala index 56f6ab4..a3581cb 100644 --- a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Rel2Spec.scala +++ b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/Rel2Spec.scala @@ -6,15 +6,19 @@ import java.time.LocalDateTime import ru.primetalk.typed.ontology.simple.meta.#: import ru.primetalk.typed.ontology.simple.meta.EmptySchema import ru.primetalk.typed.ontology.simple.relalg.Relation -import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToRelation -import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToV +// import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToRelation +// import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToV import ru.primetalk.typed.ontology.simple.meta.SimplePropertyId import ru.primetalk.typed.ontology.simple.meta.RecordSchema import ru.primetalk.typed.ontology.simple.relalg.relation - +import ru.primetalk.typed.ontology.simple.meta.SimpleTypes +import SimpleTypes.{given, *} import scala.collection.immutable.SortedMap import cats.MonoidK import cats.Applicative +import ru.primetalk.typed.ontology.simple.meta.{Projector, PropertyProjector} +import ru.primetalk.typed.ontology.simple.meta.SchemaValueType +import ru.primetalk.typed.ontology.simple.meta.RecordPropertyValueType trait TestDataRel2 extends BaseSpec: val product1: Product.Row = (1, "product1", BigInt(5)) @@ -35,6 +39,17 @@ class Rel2Spec extends TestDataRel2: println(Product.id.toString) assertResult("id: int")(Product.id.toString) } + test("availability of Projector") { + val p0 = summon[Projector[Product.Id #: EmptySchema, ?, EmptySchema, ?]] + val p1 = summon[Projector[Product.Id #: EmptySchema, ?, Product.Id #: EmptySchema, ?]]( + using someSchemaPlusPropertyProjector + ) + val pKey = 
summon[Projector[Product.TableSchema, ?, Product.PrimaryKeySchema, ?]] + val svt = summon[SchemaValueType.Aux1[Product.TableSchema]] + val product1: svt.Value = (1, "name", BigInt(1)) + val product2: Product.Row = (1, "name", BigInt(1)) + pKey.apply(product1) + } test("projection") { val ids = products.projection(Product.primaryKeySchema) ids.rows should equal(List(Tuple(1), Tuple(2))) @@ -66,16 +81,50 @@ class Rel2Spec extends TestDataRel2: val s1: S1 = RecordSchema.constSchema[S1] s1 should equal(s2) } - test("Remove property from schema") { + test("Check Product.id") { + val id = Product.id + id should equal(id) + s"$id" should equal("id: int") + } + + test("Check schema") { + val s1 = Product.tableSchema + val s2 = Product.id #: Product.name #: Product.price #: EmptySchema + s1 should equal(s2) + } + test("Remove first property from schema") { + val s1 = Product.tableSchema.remove(Product.id) + val s2 = Product.name #: Product.price #: EmptySchema + s1 should equal(s2) + } + test("Remove middle property from schema") { + val s1 = Product.tableSchema.remove(Product.name) + val s2 = Product.id #: Product.price #: EmptySchema + s1 should equal(s2) + } + test("Remove last property from schema") { val s1 = Product.tableSchema.remove(Product.price) val s2 = Product.id #: Product.name #: EmptySchema s1 should equal(s2) } test("cross product") { - val poi = orderItems.crossProduct(products) + val poi = orderItems.crossProduct(products) + poi.rows should equal( + List( + (1, 1, 1, 1, "product1", 5), + (1, 1, 1, 2, "product2", 20), + (2, 1, 1, 1, "product1", 5), + (2, 1, 1, 2, "product2", 20), + (3, 1, 2, 1, "product1", 5), + (3, 1, 2, 2, "product2", 20) + ) + ) val withoutPrice = Product.tableSchema.remove(Product.price) - val s = OrderItem.tableSchema.concat(withoutPrice) // Product.id #: Product.name #: EmptySchema) - // val s = OrderItem.tableSchema.concat(Product.idNameSchema)// Product.id #: Product.name #: EmptySchema) + val s1 = + 
OrderItem.tableSchema.concat(withoutPrice) // Product.id #: Product.name #: EmptySchema) + val s = OrderItem.tableSchema.concat( + Product.idNameSchema + ) // Product.id #: Product.name #: EmptySchema) val res = poi.projection(s) res.rows should equal( List( @@ -87,22 +136,52 @@ class Rel2Spec extends TestDataRel2: (3, 1, 2, 2, "product2") ) ) - import res.schema._ - res.rows.head(Product.name) should equal("product1") + // import res.schema._ + // res.rows.head(Product.name) should equal("product1") } test("Extension methods to read/write property values") { import products.schema._ - (products.rows.head).apply(Product.name) should equal("product1") - val product1updated = products.rows.head.updated(Product.name)("new name") - product1updated(Product.name) should equal("new name") + val svt = summon[SchemaValueType.Aux1[Product.TableSchema]] + val priceRpvt = summon[RecordPropertyValueType[Product.Price, BigInt]] + val priceSvt = summon[SchemaValueType.Aux1[priceRpvt.Schema]] + val t1priceSvt= summon[SchemaValueType.Aux1[Product.PriceSchema]] + val t2namepriceSvt= summon[SchemaValueType.Aux1[Product.NamePriceSchema]] + val prjPrice = + summon[PropertyProjector[Product.PriceSchema, t1priceSvt.Value, Product.Price, BigInt]] + val prjPrice2I = propertyProjectorTail[ + BigInt, + Product.Price, + String, + Product.Name, + + Product.PriceSchema, + // Product.NamePriceSchema, + Tuple1[BigInt] + ]//(using priceRpvt, prjPrice, t2namepriceSvt) + val prjPrice2 = + summon[PropertyProjector[Product.NamePriceSchema, t2namepriceSvt.Value, Product.price.type, BigInt]]( + using + prjPrice2I + ) + // val prjPrice2o = + // summon[Projector[Product.NamePriceSchema, t2namepriceSvt.Value, Product.price.Schema, BigInt]] + val prjPrice3 = + summon[PropertyProjector[Product.TableSchema, svt.Value, Product.price.type, BigInt]]( + using + propertyProjectorTail//(using priceRpvt, prjPrice2, svt) + ) + val prj = summon[PropertyProjector[Product.TableSchema, svt.Value, Product.name.type, 
String]]//(using propertyProjectorOther) + val v: svt.Value = products.rows.head + new ValueOps(v)(using svt)/Product.name1 should equal("product1") + // new ValueOps(v)(using svt)./(Product.name) should equal("product1") + // val product1updated = products.rows.head.updated(Product.name)("new name") + // product1updated(Product.name) should equal("new name") } test("Calculate column") { object price extends OrderItem.column[Long] - val idGetter = orderItems.schema.propertyGetter(OrderItem.id) - val p = orderItems.prependCalcColumn(price)(idGetter(_) * 10L) - val s = price #: OrderItem.tableSchema - val res = p.projection(s) - res.show should equal( + // val idGetter = orderItems.schema.propertyGetter(OrderItem.id) + val p = orderItems.prependCalcColumn(price)(row => row._1 * 10L) + p.show should equal( """price: long, id: int, orderId: int, productId: int |----- |(10,1,1,1) @@ -112,16 +191,18 @@ class Rel2Spec extends TestDataRel2: } test("Calculate column with expr") { object price extends OrderItem.column[Int] - val p = orderItems.prependCalcColumn(price)({ - import orderItems._ - rowFun(prop(OrderItem.id) * const(10)) - }) + import orderItems._ + val p = orderItems.prependCalcColumn(price)( + rowFun: + prop(OrderItem.id) * const(10) + ) // TODO: test with a newer version of Scala // For some reason the following doesn't work: - // prependCalcColumnF(price)({ - // import orderItems._ - // prop(OrderItem.id) * const(10) - // }) + import orderItems._ + val expr = prop(OrderItem.id) * const(10) + val p2 = orderItems.prependCalcColumnF(price)( + prop(OrderItem.id)// * const(10) + ) p.show should equal( """price: int, id: int, orderId: int, productId: int |----- @@ -142,9 +223,9 @@ class Rel2Spec extends TestDataRel2: ) } test("Filter") { - val idGetter = orderItems.schema.propertyGetter(OrderItem.id) + // val idGetter = orderItems.schema.propertyGetter(OrderItem.id) object id2 extends OrderItem.column[Int] - val p = orderItems.filter(idGetter(_) == 1) + val p = 
orderItems.filter(_._1 == 1) p.rows should equal(List(orderItem1)) } test("Union") { @@ -153,217 +234,217 @@ class Rel2Spec extends TestDataRel2: } test("empty") { - val p = Relation.empty[OrderItem.tableSchema.type, List](OrderItem.tableSchema) + val p = Relation.empty[OrderItem.TableSchema, OrderItem.Row, List](OrderItem.tableSchema) p.rows shouldBe empty } object sumPrice extends Product.column[BigInt] given ordering: Ordering[Tuple1[String]] = cats.kernel.Order[Tuple1[String]].toOrdering - test("Expenses report with a simple groupMapReduce") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? - transparent inline def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI - ) = - val prod = product.crossProduct(orderItem) - val joined = prod.filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) - val keySchema = Product.name #: EmptySchema - val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - // val resultSchema = keySchema.concat(aggregateSchema) - val keyF = keySchema.projectorFrom[joined.Schema](joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) - reduced1 should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - reduced1 - // val reduced = joined.groupMapReduceS(keySchema, aggregateSchema)( - // keyF, - // priceAsSumPrice, - // ) - // reduced should equal(List()) +// test("Expenses report with a simple groupMapReduce") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id 
= ? +// transparent inline def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI +// ) = +// val prod = product.crossProduct(orderItem) +// val joined = prod.filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) +// val keySchema = Product.name #: EmptySchema +// val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// // val resultSchema = keySchema.concat(aggregateSchema) +// val keyF = keySchema.projectorFrom[joined.Schema](joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) +// reduced1 should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// reduced1 +// // val reduced = joined.groupMapReduceS(keySchema, aggregateSchema)( +// // keyF, +// // priceAsSumPrice, +// // ) +// // reduced should equal(List()) - val result = expensesReport(products, orderItems) - result should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - } +// val result = expensesReport(products, orderItems) +// result should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// } - test("Expenses report with a groupMapReduce and then schema-based transformation") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? 
- def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = - val prod = product.crossProduct(orderItem) - val joined = prod.filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) - val keySchema = Product.name #: EmptySchema - val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateSchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) - reduced1 should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - // convertSortedMapToRelation[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) - // val vals = convertSortedMapToV[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - // concat - val allVals: Seq[resultSchema.Values] = reduced1.toSeq.map(concat(_, _)) - import cats.syntax.semigroupk.toSemigroupKOps - val vals = allVals.foldLeft(MonoidK[List].empty[resultSchema.Values])((b, a) => - b <+> Applicative[List].pure(a) - ) - Relation.apply(resultSchema)(vals) +// test("Expenses report with a groupMapReduce and then schema-based transformation") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? 
+// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = +// val prod = product.crossProduct(orderItem) +// val joined = prod.filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) +// val keySchema = Product.name #: EmptySchema +// val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateSchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) +// reduced1 should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// // convertSortedMapToRelation[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) +// // val vals = convertSortedMapToV[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// // concat +// val allVals: Seq[resultSchema.Values] = reduced1.toSeq.map(concat(_, _)) +// import cats.syntax.semigroupk.toSemigroupKOps +// val vals = allVals.foldLeft(MonoidK[List].empty[resultSchema.Values])((b, a) => +// b <+> Applicative[List].pure(a) +// ) +// Relation.apply(resultSchema)(vals) - val result = expensesReport(products, orderItems) - result.rows should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expenses report with a schema-based groupMapReduceS") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? 
- def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderId: Order.id.P - ) = - val prod = product.crossProduct( - orderItem - .filter(row => orderItem.schema.propertyGetter(OrderItem.orderId)(row) == orderId) - ) - // TODO: DSL for predicates that use columns Product.id === OrderItem.productId - val joined = prod - .filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) +// val result = expensesReport(products, orderItems) +// result.rows should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expenses report with a schema-based groupMapReduceS") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderId: Order.id.P +// ) = +// val prod = product.crossProduct( +// orderItem +// .filter(row => orderItem.schema.propertyGetter(OrderItem.orderId)(row) == orderId) +// ) +// // TODO: DSL for predicates that use columns Product.id === OrderItem.productId +// val joined = prod +// .filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = 
sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expense report with a classic DSL") { - def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderIdValue: Order.id.P - ) = - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? +// val result = expensesReport(products, orderItems, 1) +// result.rows should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expense report with a classic DSL") { +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderIdValue: Order.id.P +// ) = +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? - // SELECT * FROM order_item WHERE order_item.order_id = ? +// // SELECT * FROM order_item WHERE order_item.order_id = ? - val itemsForOrderId = { - import OrderItem._ - import orderItem._ - filter( - rowFun(prop(orderId) === const(orderIdValue)) - ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - // tagless doesn't work yet... 
orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - } - val prod = product.crossProduct(itemsForOrderId) - // DONE: DSL for predicates that use columns Product.id === OrderItem.productId - val joined = { - import prod._ - filter(rowFun(prop(Product.id) === prop(OrderItem.productId))) - } - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val itemsForOrderId = { +// import OrderItem._ +// import orderItem._ +// filter( +// rowFun(prop(orderId) === const(orderIdValue)) +// ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// // tagless doesn't work yet... 
orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// } +// val prod = product.crossProduct(itemsForOrderId) +// // DONE: DSL for predicates that use columns Product.id === OrderItem.productId +// val joined = { +// import prod._ +// filter(rowFun(prop(Product.id) === prop(OrderItem.productId))) +// } +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expense report with join") { - def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderIdValue: Order.id.P - ) = - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? +// val result = expensesReport(products, orderItems, 1) +// result.rows should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expense report with join") { +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderIdValue: Order.id.P +// ) = +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? 
- // SELECT * FROM order_item WHERE order_item.order_id = ? +// // SELECT * FROM order_item WHERE order_item.order_id = ? - val itemsForOrderId = { - import OrderItem._ - import orderItem._ - filter( - rowFun(prop(orderId) === const(orderIdValue)) - ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - // tagless doesn't work yet... orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - } - val joined = product.join(OrderItem.productIdFk)(itemsForOrderId) +// val itemsForOrderId = { +// import OrderItem._ +// import orderItem._ +// filter( +// rowFun(prop(orderId) === const(orderIdValue)) +// ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// // tagless doesn't work yet... orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// } +// val joined = product.join(OrderItem.productIdFk)(itemsForOrderId) - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom[joined.Schema](joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom[joined.Schema](joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// 
aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } +// val result = expensesReport(products, orderItems, 1) +// result.rows should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } diff --git a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/StreamSpec.scala b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/StreamSpec.scala index 10cc652..ee23e00 100644 --- a/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/StreamSpec.scala +++ b/ontology-example1/src/test/scala/ru/primetalk/typed/ontology/example3/StreamSpec.scala @@ -7,9 +7,9 @@ import ru.primetalk.typed.ontology.simple.meta.#: import ru.primetalk.typed.ontology.simple.meta.EmptySchema import ru.primetalk.typed.ontology.simple.meta.SimplePropertyId import ru.primetalk.typed.ontology.simple.meta.RecordSchema -import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToRelation -import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToV -import ru.primetalk.typed.ontology.simple.relalg.relation +// import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToRelation +// import ru.primetalk.typed.ontology.simple.relalg.convertSortedMapToV +// import ru.primetalk.typed.ontology.simple.relalg.relation import ru.primetalk.typed.ontology.simple.relalg.Relation import scala.collection.immutable.SortedMap import cats.MonoidK @@ -17,328 +17,328 @@ import cats.Applicative import fs2._ import cats.Eval -trait TestDataStream extends BaseSpec: - val product1: Product.Row = (1, "product1", BigInt(5)) - val product2: Product.Row = (2, "product2", BigInt(20)) - val products = 
Product.relation(Stream.emits(Seq(product1, product2))) - val order1: Order.Row = (1, LocalDateTime.of(2022, java.time.Month.JANUARY, 23, 0, 0, 0, 0)) - val orders = Order.relation(Stream.emit(order1)) - val orderItem1: OrderItem.Row = (1, 1, Product.tableSchema.get(Product.id)(product1).get) - val orderItem2: OrderItem.Row = (2, 1, Product.tableSchema.get(Product.id)(product1).get) - val orderItem3: OrderItem.Row = (3, 1, Product.tableSchema.get(Product.id)(product2).get) - val orderItems = OrderItem.relation(Stream.emits(Seq(orderItem1, orderItem2, orderItem3))) +// trait TestDataStream extends BaseSpec: +// val product1: Product.Row = (1, "product1", BigInt(5)) +// val product2: Product.Row = (2, "product2", BigInt(20)) +// val products = Product.relation(Stream.emits(Seq(product1, product2))) +// val order1: Order.Row = (1, LocalDateTime.of(2022, java.time.Month.JANUARY, 23, 0, 0, 0, 0)) +// val orders = Order.relation(Stream.emit(order1)) +// val orderItem1: OrderItem.Row = (1, 1, Product.tableSchema.get(Product.id)(product1).get) +// val orderItem2: OrderItem.Row = (2, 1, Product.tableSchema.get(Product.id)(product1).get) +// val orderItem3: OrderItem.Row = (3, 1, Product.tableSchema.get(Product.id)(product2).get) +// val orderItems = OrderItem.relation(Stream.emits(Seq(orderItem1, orderItem2, orderItem3))) -class StreamSpec extends TestDataStream: +// class StreamSpec extends TestDataStream: - given cats.Foldable[[T] =>> Stream[Pure, T]] with - def foldLeft[A, B](fa: Stream[Pure, A], b: B)(f: (B, A) => B): B = - fa.fold(b)(f).compile.toList.head +// given cats.Foldable[[T] =>> Stream[Pure, T]] with +// def foldLeft[A, B](fa: Stream[Pure, A], b: B)(f: (B, A) => B): B = +// fa.fold(b)(f).compile.toList.head - def foldRight[A, B](fa: Stream[Pure, A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = - fa.fold(lb)((b, a) => f(a, b)).compile.toList.head +// def foldRight[A, B](fa: Stream[Pure, A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = +// 
fa.fold(lb)((b, a) => f(a, b)).compile.toList.head - test("Property to string") { - println(Product.id.toString) - assertResult("id: int")(Product.id.toString) - } - test("projection") { - val ids = products.projection(Product.primaryKeySchema) - ids.rows.compile.toList should equal(List(Tuple(1), Tuple(2))) - ids.schema.toString should equal(Product.primaryKeySchema.toString) - } - test("schema concat") { - val schema3 = products.schema.concat(orderItems.schema) - schema3.toString should equal(products.schema.toString + ", " + orderItems.schema.toString) - } - test("cross product from") { - val poi = products.crossProductFrom(orderItems) - poi.rows.compile.toList should equal( - List( - (1, 1, 1, 1, "product1", BigInt(5)), - (1, 1, 1, 2, "product2", BigInt(20)), - (2, 1, 1, 1, "product1", BigInt(5)), - (2, 1, 1, 2, "product2", BigInt(20)), - (3, 1, 2, 1, "product1", BigInt(5)), - (3, 1, 2, 2, "product2", BigInt(20)) - ) - ) - } - test("ValueOf") { - summon[ValueOf[Product.name.type]].value should equal(Product.name) - } - test("constSchema") { - type S1 = Product.name.type #: Product.price.type #: EmptySchema - val s2 = Product.fields(Product.name, Product.price) - val s1: S1 = RecordSchema.constSchema[S1] - s1 should equal(s2) - } - test("Remove property from schema") { - val s1 = Product.tableSchema.remove(Product.price) - val s2 = Product.id #: Product.name #: EmptySchema - s1 should equal(s2) - } - test("cross product") { - val poi = orderItems.crossProduct(products) - val withoutPrice = Product.tableSchema.remove(Product.price) - val s = OrderItem.tableSchema.concat(withoutPrice) // Product.id #: Product.name #: EmptySchema) - // val s = OrderItem.tableSchema.concat(Product.idNameSchema)// Product.id #: Product.name #: EmptySchema) - val res = poi.projection(s) - res.rows.compile.toList should equal( - List( - (1, 1, 1, 1, "product1"), - (1, 1, 1, 2, "product2"), - (2, 1, 1, 1, "product1"), - (2, 1, 1, 2, "product2"), - (3, 1, 2, 1, "product1"), - (3, 1, 2, 
2, "product2") - ) - ) - } - test("Calculate column") { - object price extends OrderItem.column[Long] - val idGetter = orderItems.schema.propertyGetter(OrderItem.id) - val p = orderItems.prependCalcColumn(price)(idGetter(_) * 10L) - val s = price #: OrderItem.tableSchema - val res = p.projection(s) - res.show should equal( - """price: long, id: int, orderId: int, productId: int - |----- - |(10,1,1,1) - |(20,2,1,1) - |(30,3,1,2)""".stripMargin - ) - } - test("Rename column") { - object id2 extends OrderItem.column[Int] - val p = orderItems.rename(OrderItem.id, id2) - p.show should equal( - """id2: int, orderId: int, productId: int - |----- - |(1,1,1) - |(2,1,1) - |(3,1,2)""".stripMargin - ) - } - test("Filter") { - val idGetter = orderItems.schema.propertyGetter(OrderItem.id) - object id2 extends OrderItem.column[Int] - val p = orderItems.filter(idGetter(_) == 1) - p.rows.compile.toList should equal(List(orderItem1)) - } - test("Union") { - val p = orderItems ++ orderItems - p.rows.compile.count should equal(orderItems.rows.compile.count * 2) - } +// test("Property to string") { +// println(Product.id.toString) +// assertResult("id: int")(Product.id.toString) +// } +// test("projection") { +// val ids = products.projection(Product.primaryKeySchema) +// ids.rows.compile.toList should equal(List(Tuple(1), Tuple(2))) +// ids.schema.toString should equal(Product.primaryKeySchema.toString) +// } +// test("schema concat") { +// val schema3 = products.schema.concat(orderItems.schema) +// schema3.toString should equal(products.schema.toString + ", " + orderItems.schema.toString) +// } +// test("cross product from") { +// val poi = products.crossProductFrom(orderItems) +// poi.rows.compile.toList should equal( +// List( +// (1, 1, 1, 1, "product1", BigInt(5)), +// (1, 1, 1, 2, "product2", BigInt(20)), +// (2, 1, 1, 1, "product1", BigInt(5)), +// (2, 1, 1, 2, "product2", BigInt(20)), +// (3, 1, 2, 1, "product1", BigInt(5)), +// (3, 1, 2, 2, "product2", BigInt(20)) +// ) +// ) 
+// } +// test("ValueOf") { +// summon[ValueOf[Product.name.type]].value should equal(Product.name) +// } +// test("constSchema") { +// type S1 = Product.name.type #: Product.price.type #: EmptySchema +// val s2 = Product.fields(Product.name, Product.price) +// val s1: S1 = RecordSchema.constSchema[S1] +// s1 should equal(s2) +// } +// test("Remove property from schema") { +// val s1 = Product.tableSchema.remove(Product.price) +// val s2 = Product.id #: Product.name #: EmptySchema +// s1 should equal(s2) +// } +// test("cross product") { +// val poi = orderItems.crossProduct(products) +// val withoutPrice = Product.tableSchema.remove(Product.price) +// val s = OrderItem.tableSchema.concat(withoutPrice) // Product.id #: Product.name #: EmptySchema) +// // val s = OrderItem.tableSchema.concat(Product.idNameSchema)// Product.id #: Product.name #: EmptySchema) +// val res = poi.projection(s) +// res.rows.compile.toList should equal( +// List( +// (1, 1, 1, 1, "product1"), +// (1, 1, 1, 2, "product2"), +// (2, 1, 1, 1, "product1"), +// (2, 1, 1, 2, "product2"), +// (3, 1, 2, 1, "product1"), +// (3, 1, 2, 2, "product2") +// ) +// ) +// } +// test("Calculate column") { +// object price extends OrderItem.column[Long] +// val idGetter = orderItems.schema.propertyGetter(OrderItem.id) +// val p = orderItems.prependCalcColumn(price)(idGetter(_) * 10L) +// val s = price #: OrderItem.tableSchema +// val res = p.projection(s) +// res.show should equal( +// """price: long, id: int, orderId: int, productId: int +// |----- +// |(10,1,1,1) +// |(20,2,1,1) +// |(30,3,1,2)""".stripMargin +// ) +// } +// test("Rename column") { +// object id2 extends OrderItem.column[Int] +// val p = orderItems.rename(OrderItem.id, id2) +// p.show should equal( +// """id2: int, orderId: int, productId: int +// |----- +// |(1,1,1) +// |(2,1,1) +// |(3,1,2)""".stripMargin +// ) +// } +// test("Filter") { +// val idGetter = orderItems.schema.propertyGetter(OrderItem.id) +// object id2 extends 
OrderItem.column[Int] +// val p = orderItems.filter(idGetter(_) == 1) +// p.rows.compile.toList should equal(List(orderItem1)) +// } +// test("Union") { +// val p = orderItems ++ orderItems +// p.rows.compile.count should equal(orderItems.rows.compile.count * 2) +// } - test("empty") { - val p = Relation.empty[OrderItem.tableSchema.type, List](OrderItem.tableSchema) - p.rows shouldBe empty - } - object sumPrice extends Product.column[BigInt] - given ordering: Ordering[Tuple1[String]] = cats.kernel.Order[Tuple1[String]].toOrdering +// test("empty") { +// val p = Relation.empty[OrderItem.tableSchema.type, List](OrderItem.tableSchema) +// p.rows shouldBe empty +// } +// object sumPrice extends Product.column[BigInt] +// given ordering: Ordering[Tuple1[String]] = cats.kernel.Order[Tuple1[String]].toOrdering - test("Expenses report with a simple groupMapReduce") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? 
- def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = - val prod = product.crossProduct(orderItem) - val joined = prod.filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) - val keySchema = Product.name #: EmptySchema - val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - // val resultSchema = keySchema.concat(aggregateSchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) - reduced1 should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - reduced1 - // val reduced = joined.groupMapReduceS(keySchema, aggregateSchema)( - // keyF, - // priceAsSumPrice, - // ) - // reduced should equal(List()) +// test("Expenses report with a simple groupMapReduce") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? 
+// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = +// val prod = product.crossProduct(orderItem) +// val joined = prod.filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) +// val keySchema = Product.name #: EmptySchema +// val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// // val resultSchema = keySchema.concat(aggregateSchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) +// reduced1 should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// reduced1 +// // val reduced = joined.groupMapReduceS(keySchema, aggregateSchema)( +// // keyF, +// // priceAsSumPrice, +// // ) +// // reduced should equal(List()) - val result = expensesReport(products, orderItems) - result should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - } +// val result = expensesReport(products, orderItems) +// result should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// } - test("Expenses report with a groupMapReduce and then schema-based transformation") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? 
- def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = - val prod = product.crossProduct(orderItem) - val joined = prod.filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) - val keySchema = Product.name #: EmptySchema - val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateSchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) - reduced1 should equal( - SortedMap( - Tuple1("product1") -> Tuple1(BigInt(10)), - Tuple1("product2") -> Tuple1(BigInt(20)) - ) - ) - // convertSortedMapToRelation[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) - // val vals = convertSortedMapToV[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - // concat - val allVals: Seq[resultSchema.Values] = reduced1.toSeq.map(concat(_, _)) - import cats.syntax.semigroupk.toSemigroupKOps - val vals = allVals.foldLeft(MonoidK[List].empty[resultSchema.Values])((b, a) => - b <+> Applicative[List].pure(a) - ) - Relation.apply(resultSchema)(vals) +// test("Expenses report with a groupMapReduce and then schema-based transformation") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? 
+// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self](product: P, orderItem: OI) = +// val prod = product.crossProduct(orderItem) +// val joined = prod.filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) +// val keySchema = Product.name #: EmptySchema +// val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateSchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduce(keyF)(priceAsSumPrice) +// reduced1 should equal( +// SortedMap( +// Tuple1("product1") -> Tuple1(BigInt(10)), +// Tuple1("product2") -> Tuple1(BigInt(20)) +// ) +// ) +// // convertSortedMapToRelation[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) +// // val vals = convertSortedMapToV[List, keySchema.type, aggregateSchema.type](keySchema, aggregateSchema)(reduced1) +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// // concat +// val allVals: Seq[resultSchema.Values] = reduced1.toSeq.map(concat(_, _)) +// import cats.syntax.semigroupk.toSemigroupKOps +// val vals = allVals.foldLeft(MonoidK[List].empty[resultSchema.Values])((b, a) => +// b <+> Applicative[List].pure(a) +// ) +// Relation.apply(resultSchema)(vals) - val result = expensesReport(products, orderItems) - result.rows should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expenses report with a schema-based groupMapReduceS") { - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? 
- def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderId: Order.id.P - ) = - val prod = product.crossProduct( - orderItem - .filter(row => orderItem.schema.propertyGetter(OrderItem.orderId)(row) == orderId) - ) - // TODO: DSL for predicates that use columns Product.id === OrderItem.productId - val joined = prod - .filter(row => - prod.schema.propertyGetter(Product.id)(row) == - prod.schema.propertyGetter(OrderItem.productId)(row) - ) +// val result = expensesReport(products, orderItems) +// result.rows should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expenses report with a schema-based groupMapReduceS") { +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderId: Order.id.P +// ) = +// val prod = product.crossProduct( +// orderItem +// .filter(row => orderItem.schema.propertyGetter(OrderItem.orderId)(row) == orderId) +// ) +// // TODO: DSL for predicates that use columns Product.id === OrderItem.productId +// val joined = prod +// .filter(row => +// prod.schema.propertyGetter(Product.id)(row) == +// prod.schema.propertyGetter(OrderItem.productId)(row) +// ) - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = 
sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows.compile.toList should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expense report with a classic DSL") { - def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderIdValue: Order.id.P - ) = - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? +// val result = expensesReport(products, orderItems, 1) +// result.rows.compile.toList should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expense report with a classic DSL") { +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderIdValue: Order.id.P +// ) = +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? - // SELECT * FROM order_item WHERE order_item.order_id = ? +// // SELECT * FROM order_item WHERE order_item.order_id = ? - val itemsForOrderId = { - import OrderItem._ - import orderItem._ - filter( - rowFun(prop(orderId) === const(orderIdValue)) - ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - // tagless doesn't work yet... 
orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - } - val prod = product.crossProduct(itemsForOrderId) - // DONE: DSL for predicates that use columns Product.id === OrderItem.productId - val joined = { - import prod._ - filter(rowFun(prop(Product.id) === prop(OrderItem.productId))) - } - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val itemsForOrderId = { +// import OrderItem._ +// import orderItem._ +// filter( +// rowFun(prop(orderId) === const(orderIdValue)) +// ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// // tagless doesn't work yet... 
orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// } +// val prod = product.crossProduct(itemsForOrderId) +// // DONE: DSL for predicates that use columns Product.id === OrderItem.productId +// val joined = { +// import prod._ +// filter(rowFun(prop(Product.id) === prop(OrderItem.productId))) +// } +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows.compile.toList should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } - test("Expense report with join") { - def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( - product: P, - orderItem: OI, - orderIdValue: Order.id.P - ) = - // SELECT product.name, sum(product.price) - // FROM order_item JOIN product ON order_item.product_id = product.id - // WHERE order_item.order_id = ? +// val result = expensesReport(products, orderItems, 1) +// result.rows.compile.toList should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } +// test("Expense report with join") { +// def expensesReport[V[_], P <: products.Self, OI <: orderItems.Self]( +// product: P, +// orderItem: OI, +// orderIdValue: Order.id.P +// ) = +// // SELECT product.name, sum(product.price) +// // FROM order_item JOIN product ON order_item.product_id = product.id +// // WHERE order_item.order_id = ? 
- // SELECT * FROM order_item WHERE order_item.order_id = ? +// // SELECT * FROM order_item WHERE order_item.order_id = ? - val itemsForOrderId = { - import OrderItem._ - import orderItem._ - filter( - rowFun(prop(orderId) === const(orderIdValue)) - ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - // tagless doesn't work yet... orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) - } - val joined = product.join(OrderItem.productIdFk)(itemsForOrderId) +// val itemsForOrderId = { +// import OrderItem._ +// import orderItem._ +// filter( +// rowFun(prop(orderId) === const(orderIdValue)) +// ) // row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// // tagless doesn't work yet... orderItem.filter(orderItem.expr[Boolean]([E[_]] => (e: orderItem.TaglessDsl[E]) => e.value(true)))// row => orderItem.schema.propertyGetter(orderId)(row) == orderIdValue) +// } +// val joined = product.join(OrderItem.productIdFk)(itemsForOrderId) - val keySchema = Product.name #: EmptySchema - // val aggregateSchema = sumPrice #: EmptySchema - val aggregateISchema = Product.price #: EmptySchema - val resultSchema = keySchema.concat(aggregateISchema) - val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) - val priceAsSumPrice = - aggregateISchema.projectorFrom(joined.schema) // joined.schema.projection(aggregateSchema) - val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) - reduced1 +// val keySchema = Product.name #: EmptySchema +// // val aggregateSchema = sumPrice #: EmptySchema +// val aggregateISchema = Product.price #: EmptySchema +// val resultSchema = keySchema.concat(aggregateISchema) +// val keyF = keySchema.projectorFrom(joined.schema) // .projection(keySchema) +// val priceAsSumPrice = +// aggregateISchema.projectorFrom(joined.schema) // 
joined.schema.projection(aggregateSchema) +// val reduced1 = joined.groupMapReduceS(keySchema, aggregateISchema)(keyF, priceAsSumPrice) +// reduced1 - val result = expensesReport(products, orderItems, 1) - result.rows.compile.toList should equal( - List( - ("product1", BigInt(10)), - ("product2", BigInt(20)) - ) - ) - } +// val result = expensesReport(products, orderItems, 1) +// result.rows.compile.toList should equal( +// List( +// ("product1", BigInt(10)), +// ("product2", BigInt(20)) +// ) +// ) +// } diff --git a/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/CustomParser.scala b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/CustomParser.scala new file mode 100644 index 0000000..7826294 --- /dev/null +++ b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/CustomParser.scala @@ -0,0 +1,38 @@ +package ru.primetalk.typed.ontology.dbquill.parser + +import io.getquill.parser.ParserLibrary +import scala.quoted._ +import io.getquill.parser.OperationsParser +import io.getquill.ast.Ast +import io.getquill.ast.Infix +import io.getquill.quat.Quat +import io.getquill.parser.engine.ParserChain +import io.getquill.parser.engine.Parser +import io.getquill.norm.TranspileConfig + +object CustomOps { + extension (i: Int) { + def **(exponent: Int) = Math.pow(i, exponent) + } +} + +object CustomParser extends ParserLibrary { + override def operationsParser(using Quotes, TranspileConfig) = + ParserChain.attempt(OperationsParser(_)) orElse + ParserChain.attempt(CustomOperationsParser(_)) +} + +class CustomOperationsParser(rootParse: Parser)(using Quotes) extends Parser(rootParse) { + import quotes.reflect._ + import CustomOps._ + def attempt = { + case '{ ($i: Int) ** ($j: Int) } => + Infix( + List("power(", " ,", ")"), + List(rootParse(i), rootParse(j)), + true, + false, + Quat.Value + ) + } +} diff --git 
a/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParser.scala b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParser.scala new file mode 100644 index 0000000..dd0fc71 --- /dev/null +++ b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParser.scala @@ -0,0 +1,143 @@ +package ru.primetalk.typed.ontology.dbquill.parser + +import io.getquill.parser.ParserLibrary +import scala.quoted._ +import io.getquill.{EntityQueryModel, EntityQuery, Unquoteable} +import io.getquill.metaprog.Extractors._ +import io.getquill.parser.ParserHelpers._ +import io.getquill.parser.QueryParser +import io.getquill.ast.{Ast, Entity, Renameable} +import io.getquill.quat.Quat +import io.getquill.parser.engine.ParserChain +import io.getquill.parser.engine.Parser +import io.getquill.quotation.NonQuotedException +import io.getquill.norm.TranspileConfig +import ru.primetalk.typed.ontology.simple.meta.{ + annotated, + RecordSchema, + SchemaLike, + SchemaValueType, + TableBuilder, + #@ +} +import ru.primetalk.typed.ontology.simple.relalg.Relation +import io.getquill.generic.GenericDecoder +import io.getquill.generic.DecodingType +import io.getquill.generic.GenericEncoder +import io.getquill.querySchema +// object OntEntityQuery { +// def apply[S <: SchemaLike, T](tableName: String, svt: SchemaValueType[S, T]) = +// new OntEntityQuery[S, T, svt.AValue](tableName, svt) +// } + +class OntEntityQuery[S <: SchemaLike, T, AV](val tableName: String, val svt: SchemaValueType[S, T]) + extends EntityQuery[T] { + // override def withFilter(f: T => Boolean): EntityQuery[T] = NonQuotedException() + // override def filter(f: T => Boolean): EntityQuery[T] = NonQuotedException() + // override def map[R](f: T => R): EntityQuery[R] = NonQuotedException() +} + +object MyTestEntity +object MyTestEntity2 + +// extension [T <: TableBuilder](t: T) +// inline def query(using +// svt: 
SchemaValueType.Aux1[t.TableSchema], + +// )= //: OntEntityQuery[t.TableSchema, svt.Value, svt.AValue] = +// ontquery[t.TableSchema, svt.Value, svt.AValue](t.tableName) + +transparent inline def ontquery[S <: SchemaLike, T <: Tuple, AV <: T#@S](tableName: String)(using + svt: SchemaValueType[S, T] +) = ${ + SchemaBasedParserMacros.ontqueryImpl[S, T, AV]('tableName, 'svt) +} + // querySchema[T](tableName) + // .map{t => + // tupleConverter(t) + // } + // new OntEntityQuery[S, T, AV](tableName, svt) // NonQuotedException() + +class SchemaBasedParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with Parser.PrefilterType[OntEntityQuery[?, ?, ?]] + with PropertyAliases + with Helpers { + import quotes.reflect._ + // import quotes.reflect.{Constant => TConstant, Ident => TIdent, _} + import MatchingOptimizers._ + + // private def warnVerifyNoBranches(v: VerifyNoBranches.Output, expr: Expr[_]): Unit = + // if (v.messages.nonEmpty) + // report.warning("Questionable row-class found.\n" + v.messages.map(_.msg).mkString("\n"), expr) + + def attempt = { + // case expr @ '{ + // type s <: SchemaLike + // type t + // type av + // type overall <: OntEntityQuery[`s`, `t`, `av`] + // new OntEntityQuery[`s`, `t`, `av`]($name, $svt): OntEntityQuery[`s`, `t`, `av`] + // } => + // //error(expr) + // val quat = Quat.Product.apply("unknown", Quat.Product.Type.Abstract, Iterable.empty[(String, Quat)])// InferQuat.ofType(tpe).probit + // Entity.Opinionated("name1", List(), quat, Renameable.Fixed) + case expr @ '{ + type s <: RecordSchema + type t + type av + // type svtt <: SchemaValueType[s, t] : svtt} + new OntEntityQuery[`s`, `t`, `av`]($name, $svt): OntEntityQuery[`s`, `t`, `av`] + } => + // val svtV = svtFromExpr[s, t].unapply(svt).getOrElse(error(svt)) + // val tpe = TypeRepr.of[svtV.AValue] + // val quat = InferQuat.ofType(tpe).probit + val quat = Quat.Product.apply("unknown", Quat.Product.Type.Abstract, Iterable.empty[(String, Quat)])// 
InferQuat.ofType(tpe).probit + val name1: String = FromExpr.StringFromExpr[String].unapply(name).getOrElse(error(name)) + // warnVerifyNoBranches(VerifyNoBranches.in(quat), expr) + val res = Entity.Opinionated(name1, List(), quat, Renameable.Fixed) + report.info(s"####SchemaBasedParser: $res") + res + + // case expr @ '{ + // type t + // type s <: RecordSchema + // type svtt// <: Expr[SchemaValueType[`s`, `t`]] // : svtt} + // ontquery[`s`, `t`]($name)(using ${ svt: svtt }) + // } => + // // val svtV = svtFromExpr[s, t].unapply(svt).getOrElse(error(svt)) + // val tpe = TypeRepr.of[t #@ s]// TypeRepr.of[svtt] // V.AValue] + // // val tt = Type.of[t #@ s] + // // Quat.Product.Type.Abstract + // val quat = Quat.Product.apply("unknown", Quat.Product.Type.Abstract, Iterable.empty[(String, Quat)])// InferQuat.ofType(tpe).probit + // val name1: String = FromExpr.StringFromExpr[String].unapply(name).getOrElse(error(name)) + // // warnVerifyNoBranches(VerifyNoBranches.in(quat), expr) + // val res = Entity.Opinionated(name1, List(), quat, Renameable.Fixed) + // report.info(s"####SchemaBasedParser: $res") + // res + // // report.info(s"####SchemaBasedParser.expr: ${expr.show}") + // // throw IllegalArgumentException("123") + } + +} + +object SchemaBasedParser extends ParserLibrary: + import Parser._ + override def queryParser(using Quotes, TranspileConfig) = + ParserChain.attempt(SchemaBasedParser(_)) orElse + ParserChain.attempt(QueryParser(_)) + // (using svt: SchemaValueType[S, V]) + + inline given svtGenericDecoder[S <: SchemaLike: Type, V <: Tuple: Type, ResultRow: Type, Session] + : GenericDecoder[ResultRow, Session, TupleConverter[V] #@ S, DecodingType.Specific] = + new: + def apply(i: Int, rr: ResultRow, s: Session): TupleConverter[V] #@ S = + var res: V | Null = null + val a = res.annotated[S] + res.asInstanceOf[TupleConverter[V] #@ S] + + inline given svtGenericEncoder[S <: SchemaLike: Type, V <: Tuple: Type, PrepareRow: Type, Session] + : 
GenericEncoder[TupleConverter[V] #@ S, PrepareRow, Session] = + new: + def apply(i: Int, t: TupleConverter[V] #@ S, row: PrepareRow, session: Session): PrepareRow = + scala.compiletime.error("svtGenericEncoder not implemented") diff --git a/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParserMacros.scala b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParserMacros.scala new file mode 100644 index 0000000..4e7abe1 --- /dev/null +++ b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SchemaBasedParserMacros.scala @@ -0,0 +1,71 @@ +package ru.primetalk.typed.ontology.dbquill.parser + +import io.getquill.querySchema +import scala.quoted.* +import ru.primetalk.typed.ontology.simple.meta.{#@, #:, EmptySchema, SchemaLike, SchemaValueType} +import io.getquill.EntityQuery + +object SchemaBasedParserMacros: + def ontqueryImpl[S <: SchemaLike: Type, T <: Tuple: Type, AV <: T #@ S: Type]( + tableName: Expr[String], + svt: Expr[SchemaValueType[S, T]] + )(using Quotes) = + import quotes.reflect.* + val tpe = Type.of[S] + tpe match + case '[p #: s] => + val seqArgs = Expr.ofSeq(propertyAliases[S, T]()) + Apply( + TypeApply(Ref(Symbol.requiredMethod("io.getquill.querySchema")), List(TypeTree.of[T])), + List( + tableName.asTerm, + Typed( + Inlined( + None, + Nil, + Repeated( + // List(Literal(IntConstant(10)), Literal(StringConstant("str")), Literal(DoubleConstant(5.2))), + propertyAliases[S, T]().map(_.asTerm), + TypeTree.of[T => (Any, String)] + ) + ), + Applied(TypeIdent(defn.RepeatedParamClass), List(TypeTree.of[T => (Any, String)])) + ) + ) + ).asExprOf[EntityQuery[T]] + // '{ + // querySchema[T]($tableName, ${seqArgs}*) + // } + + def propertyAliases[S <: SchemaLike: Type, T <: Tuple: Type]( + i: Int = 0, + accum: List[Expr[T => (Any, String)]] = Nil + )(using Quotes): List[Expr[T => (Any, String)]] = + import quotes.reflect.* + + Type.of[S] match + case 
'[EmptySchema] => + accum.reverse + case '[p #: s] => + propertyAliases[s, T](i + 1, propertyAlias(i, s"column$i") :: accum) + + /** Формируем переименования в соответствии с PropertyAliasExpr + */ + def propertyAlias[T <: Tuple: Type](i: Int, name: String)(using + Quotes + ): Expr[T => (Any, String)] = + import quotes.reflect.* + val mtpe = MethodType(List("t"))(_ => List(TypeRepr.of[T]), _ => TypeRepr.of[(Any, String)]) + val tuple2term = TermRef.apply(TypeRepr.of[Tuple2[Any, String]], "Tuple2") + val lambda = // Block.apply(List(), + Lambda( + Symbol.spliceOwner, + mtpe, + { case (methSym, List(arg1: Term)) => + val _1 = Select(arg1, Symbol.requiredMethod(s"_${i + 1}")).asExprOf[Any] + val _2 = Literal(StringConstant(s"column${i + 1}")).asExprOf[String] + val tu = makeTuple2OfAnyString(_1, _2) + '{ $_1 -> $_2 }.asTerm + } + ) + lambda.asExprOf[T => (Any, String)] diff --git a/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SvtFromExpr.scala b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SvtFromExpr.scala new file mode 100644 index 0000000..de808ec --- /dev/null +++ b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/SvtFromExpr.scala @@ -0,0 +1,21 @@ +package ru.primetalk.typed.ontology.dbquill.parser + +import scala.quoted.* +import ru.primetalk.typed.ontology.simple.meta.SchemaValueType +import ru.primetalk.typed.ontology.simple.meta.SchemaLike +//import quotidian.* + +given svtFromExpr[S<: SchemaLike: Type, V: Type]: FromExpr[SchemaValueType[S, V]] = + new FromExpr[SchemaValueType[S, V]]: + + /** Return the value of the expression. + * + * Returns `None` if the expression does not represent a value or possibly contains side effects. + * Otherwise returns the `Some` of the value. 
+ */ + def unapply(expr: Expr[SchemaValueType[S, V]])(using Quotes): Option[SchemaValueType[S, V]] = + import quotes.reflect.* + expr match + case '{ new SchemaValueType[`S`, `V`] } => + Some(new SchemaValueType[S, V]) + case _ => None diff --git a/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/TupleConverter.scala b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/TupleConverter.scala new file mode 100644 index 0000000..2066791 --- /dev/null +++ b/ontology-quill-parser/src/main/scala/ru/primetalk/typed/ontology/dbquill/parser/TupleConverter.scala @@ -0,0 +1,24 @@ +package ru.primetalk.typed.ontology.dbquill.parser + +/** + * Конвертирует Tuple Scala 3 в обычный Tuple. + * Это по-идее, должно позволить воспользоваться встроенным декодером quill - GenericDecoder. + */ +type TupleConverter[T <: Tuple] = + T match + case a *: EmptyTuple => Tuple1[a] + case a *: b *: EmptyTuple => (a, b) + case a *: b *: c *: EmptyTuple => (a, b, c) + case a *: b *: c *: d *: EmptyTuple => (a, b, c, d) + case a *: b *: c *: d *: e *: EmptyTuple => (a, b, c, d, e) + case a *: b *: c *: d *: e *: f *: EmptyTuple => (a, b, c, d, e, f) + // TODO: convert other 22 tuples or make macro for the same + +inline def tupleConverter[T<:Tuple](t: T): TupleConverter[T] = + inline t match + case _: (a *: EmptyTuple) => val tt = t.asInstanceOf[a*: EmptyTuple];Tuple1(tt._1) + case _: (a *: b *: EmptyTuple) => val tt = t.asInstanceOf[a *: b *: EmptyTuple];(tt._1, tt._2) + case _: (a *: b *: c *: EmptyTuple) => val tt = t.asInstanceOf[a *: b *: c *: EmptyTuple];(tt._1, tt._2, tt._3) + case _: (a *: b *: c *: d *: EmptyTuple) => val tt = t.asInstanceOf[a *: b *: c *: d *: EmptyTuple];(tt._1, tt._2, tt._3, tt._4) + case _: (a *: b *: c *: d *: e *: EmptyTuple) => val tt = t.asInstanceOf[a *: b *: c *: d *: e *: EmptyTuple];(tt._1, tt._2, tt._3, tt._4, tt._5) + case _: (a *: b *: c *: d *: e *: f *: EmptyTuple) => val tt = t.asInstanceOf[a *: b 
*: c *: d *: e *: f *: EmptyTuple];(tt._1, tt._2, tt._3, tt._4, tt._5, tt._6) diff --git a/ontology-quill/docker-compose.yml b/ontology-quill/docker-compose.yml new file mode 100644 index 0000000..eed3663 --- /dev/null +++ b/ontology-quill/docker-compose.yml @@ -0,0 +1,16 @@ +# run with "docker compose up -d" +version: '3.7' + +services: + postgres: + image: postgres:latest + restart: always + volumes: + - ./postgres-data:/var/lib/postgresql/data + - ./src/main/resources/init.sql:/docker-entrypoint-initdb.d/init.sql + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=12345 + - POSTGRES_DB=items + ports: + - "5432:5432" \ No newline at end of file diff --git a/ontology-quill/src/main/resources/application.conf b/ontology-quill/src/main/resources/application.conf new file mode 100644 index 0000000..d547c7a --- /dev/null +++ b/ontology-quill/src/main/resources/application.conf @@ -0,0 +1,7 @@ +testPostgresDB { + dataSourceClassName=org.postgresql.ds.PGSimpleDataSource + dataSource.databaseName=items + dataSource.url="jdbc:postgresql://localhost:5432/items" + dataSource.user=postgres + dataSource.password=12345 +} diff --git a/ontology-quill/src/main/resources/init.sql b/ontology-quill/src/main/resources/init.sql new file mode 100644 index 0000000..6a67f32 --- /dev/null +++ b/ontology-quill/src/main/resources/init.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS item ( + id serial PRIMARY KEY NOT NULL, + name VARCHAR NOT NULL, + price NUMERIC(21, 2) NOT NULL +); +CREATE TABLE IF NOT EXISTS person ( + first_name text NOT NULL, + last_name text NOT NULL, + age integer NOT NULL +); diff --git a/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OldPerson.scala b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OldPerson.scala new file mode 100644 index 0000000..56fb313 --- /dev/null +++ b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OldPerson.scala @@ -0,0 +1,64 @@ +package 
ru.primetalk.typed.ontology.dbquill + +import io.getquill.* +import io.getquill.context.ContextOperation +import ru.primetalk.typed.ontology.simple.meta.SchemaProvider +import ru.primetalk.typed.ontology.dbquill.parser.{CustomOps, CustomParser} +import io.getquill.ast.{Ast, Entity} +import io.getquill.quat.Quat + +case class OldPerson(firstName: String, lastName: String, age: Int) derives SchemaProvider + +object OldPersonApp { + + val provider = SchemaProvider[OldPerson] + val personSchema = provider.schema + println(personSchema) + + given customParser: CustomParser.type = CustomParser + + import CustomOps.** + // SnakeCase turns firstName -> first_name + val ctx = new PostgresJdbcContext(SnakeCase, "testPostgresDB") + import ctx._ + + def main(args: Array[String]): Unit = { + inline def personQuillSchema = quote { + querySchema[OldPerson]("person") + } + println(s"AST=${personQuillSchema.ast}") + println(s"AST.quat=${personQuillSchema.ast.quat}") + println(s"lifts=${personQuillSchema.lifts}") + personQuillSchema.ast match + case e@Entity(name, properties, quat) => + println(s"Entity($name, $properties, $quat, renamable = ${e.renameable})") + println(s"Quat.Product(${quat.name}, ${quat.fields}, ${quat.renames}, ${quat.tpe})") + // val name: String, + // val fields: mutable.LinkedHashMap[String, Quat], + // override val renames: mutable.LinkedHashMap[String, String], + // val tpe: Quat.Product.Type + // quat match + // case Quat.Product() => + + case _ => + inline def personQuillSchemaAgePower = quote { + personQuillSchema.map(p => p.age ** 2) + } + inline def somePeople(named: String) = quote { + personQuillSchema.filter(p => p.firstName == lift(named)) + } + val people: List[OldPerson] = run(somePeople("Joe")) + + val agePowered: List[Double] = run(personQuillSchemaAgePower) + // val make = ContextOperation.Factory[ctx.idiom.type, ctx.naming.type, PrepareRow, ResultRow, Session, ctx.type](ctx.idiom, ctx.naming) + // val o = make.op[Nothing, OldPerson, 
Result[RunQueryResult[OldPerson]]] + + // o.apply { arg => + // println(arg.sql) + // ??? + // } +// summon[ToSql[EntityQuery[Person]]]. + //println(somePeople("Joe")) // TODO Get SQL + println(people) + } +} diff --git a/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OntPerson.scala b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OntPerson.scala new file mode 100644 index 0000000..f8dcf49 --- /dev/null +++ b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/OntPerson.scala @@ -0,0 +1,91 @@ +package ru.primetalk.typed.ontology.dbquill + + +import io.getquill.* +import io.getquill.context.ContextOperation +import ru.primetalk.typed.ontology.simple.meta.SchemaProvider + +import ru.primetalk.typed.ontology.dbquill.parser.SchemaBasedParser +import ru.primetalk.typed.ontology.dbquill.parser.MyTestEntity +import ru.primetalk.typed.ontology.dbquill.parser.ontquery +import ru.primetalk.typed.ontology.simple.meta.SimpleTypes.{given, *} +import ru.primetalk.typed.ontology.simple.meta.{#@, annotated, SchemaValueType} +import java.time.LocalDateTime +import io.getquill.ast.Entity +import io.getquill.quat.Quat +import scala.collection.mutable.LinkedHashMap +import io.getquill.ast.Renameable +import scala.quoted.Type +import io.getquill.generic.GenericDecoder +import io.getquill.generic.DecodingType +import ru.primetalk.typed.ontology.dbquill.parser.TupleConverter + +object OntPerson { + given schemaBasedParser: SchemaBasedParser.type = SchemaBasedParser + val ctx = new PostgresJdbcContext(SnakeCase, "testPostgresDB") + import ctx._ + + import SchemaBasedParser.svtGenericDecoder + import SchemaBasedParser.svtGenericEncoder + +// val svtOrder = summon[SchemaValueType[Order.TableSchema, (Int, LocalDateTime)]] + given svtOrder1: SchemaValueType[Order.TableSchema, Order.svt.Value] = Order.svt + + def main(args: Array[String]): Unit = { + def personQuillSchema = + Quoted[OldPerson]( + Entity.Opinionated("person", Nil, + 
Quat.Product.apply( + "OldPerson", + Quat.Product.Type.Concrete, + LinkedHashMap("firstName" -> Quat.Value, "lastName" -> Quat.Value, "age" -> Quat.Value), + // LinkedHashMap() + ), + Renameable.Fixed + ), + Nil, + Nil + ) +// AST=`querySchema`("person") +// AST.quat=OldPerson(firstName:V,lastName:V,age:V) +// lifts=List() +// Entity(person, List(), OldPerson(firstName:V,lastName:V,age:V), renamable = Fixed) +// Quat.Product(OldPerson, LinkedHashMap(firstName -> V, lastName -> V, age -> V), LinkedHashMap(), Concrete) + + println(s"AST=${personQuillSchema.ast}") + println(s"AST.quat=${personQuillSchema.ast.quat}") + println(s"lifts=${personQuillSchema.lifts}") + personQuillSchema.ast match + case e@Entity(name, properties, quat) => + println(s"Entity($name, $properties, $quat, renamable = ${e.renameable})") + println(s"Quat.Product(${quat.name}, ${quat.fields}, ${quat.renames}, ${quat.tpe})") + case _ => + + inline def orderQuery = quote { + // Order.query(using Order.svt) + // либо для произвольной схемы: + ontquery[Order.TableSchema, Order.Row, Order.svt.AValue]("order")//(using Order.svt) + } + // inline given svtGenericDecoder[ResultRow: Type, Session] + // : GenericDecoder[ResultRow, Session, Order.Row, DecodingType.Specific] = + // new: + // def apply(i: Int, rr: ResultRow, s: Session): Order.Row = + // var res: Order.svt.Value | Null = null + // val a = res.annotated[Order.TableSchema] + // res.asInstanceOf[Order.Row] + + val orders = run(orderQuery) + + // println(run(MyTestEntityQuery).string) + // val make = ContextOperation.Factory[ctx.idiom.type, ctx.naming.type, PrepareRow, ResultRow, Session, ctx.type](ctx.idiom, ctx.naming) + // val o = make.op[Nothing, OldPerson, Result[RunQueryResult[OldPerson]]] + + // o.apply { arg => + // println(arg.sql) + // ??? + // } +// summon[ToSql[EntityQuery[Person]]]. 
+ //println(somePeople("Joe")) // TODO Get SQL + // println(orders) + } +} diff --git a/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/ontology.scala b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/ontology.scala new file mode 100644 index 0000000..24fa9dd --- /dev/null +++ b/ontology-quill/src/main/scala/ru/primetalk/typed/ontology/dbquill/ontology.scala @@ -0,0 +1,21 @@ +package ru.primetalk.typed.ontology.dbquill + + +import ru.primetalk.typed.ontology.simple.meta._ +import ru.primetalk.typed.ontology.metameta.OntologyType.Record +import java.time.LocalDateTime +import SimpleTypes.{given, *} +import ru.primetalk.typed.ontology.dbquill.parser.TupleConverter + +object Order extends TableBuilder: + object id extends column[Int] + val id1 = id + type Id = id1.type + object date extends column[LocalDateTime] + type Date = date.type + type TableSchema = Id #: Date #: EmptySchema + val tableSchema: TableSchema = fields(id, date) + val ts = fields(id, date) + type TS = ts.Type + val svt = summon[SchemaValueType.Aux1[TableSchema]] + type Row = TupleConverter[svt.Value] diff --git a/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/Spec.scala b/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/Spec.scala new file mode 100644 index 0000000..72743cb --- /dev/null +++ b/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/Spec.scala @@ -0,0 +1,8 @@ +package ru.primetalk.typed.ontology.dbquill + +import org.scalatest.BeforeAndAfterAll +import org.scalatest.freespec.AnyFreeSpec +import org.scalatest.matchers.must.Matchers + +abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { +} diff --git a/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/TupleSpec.scala b/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/TupleSpec.scala new file mode 100644 index 0000000..4ca703a --- /dev/null +++ 
b/ontology-quill/src/test/scala/ru/primetalk/typed/ontology/dbquill/TupleSpec.scala @@ -0,0 +1,87 @@ +package ru.primetalk.typed.ontology.dbquill + +import scala.language.implicitConversions + +import io.getquill.Quoted + +import io.getquill.ast._ +import io.getquill.QuotationLot +import io.getquill.QuotationVase +import io.getquill.context.ExecutionType +import org.scalatest._ +import io.getquill.quat.quatOf +import io.getquill.context.ExecutionType.Static +import io.getquill.context.ExecutionType.Dynamic +import io.getquill.generic.GenericDecoder +import io.getquill.generic.GenericRowTyper +import io.getquill.generic.GenericColumnResolver +import scala.quoted._ +import scala.deriving._ +import scala.compiletime.{erasedValue, constValue, summonFrom} +import scala.collection.mutable.LinkedHashMap +import scala.reflect.ClassTag +import scala.reflect.classTag +import io.getquill.context.mirror.Row +import io.getquill.quote +import io.getquill.query +import io.getquill.context.mirror.MirrorSession +import io.getquill.* +import ru.primetalk.typed.ontology.dbquill.parser.TupleConverter + +class TupleSpec extends Spec { + + val ctx = new MirrorContext[MirrorSqlDialect, Literal](MirrorSqlDialect, Literal) with MirrorColumnResolving[MirrorSqlDialect, Literal] + import ctx.{given, _} + + "simple examples" - { + val s = MirrorSession.default + + "test tuple type" in { + val IdentP = Ident("p", quatOf[OldPerson]) + val OldPersonQuat = quatOf[OldPerson].probit + + inline def q = quote { query[OldPerson].map(p => (p.firstName, p.age)) } + q.ast mustEqual Map(Entity("OldPerson", List(), `OldPersonQuat`), IdentP, ast.Tuple(List(Property(IdentP, "firstName"),Property(IdentP, "age")))) + val result = ctx.run(q) + + val tupleRow = Row("_1" -> "Joe", "_2" -> 123) + result.extractor(tupleRow, s) mustEqual ("Joe", 123) + } + + "test case class type" in { + inline def q = quote { query[OldPerson] } + val result = ctx.run(q) + + val tupleRow = Row("firstName" -> "Joe", "lastName" -> 
"Dow", "age" -> 123) + result.extractor(tupleRow, s) mustEqual OldPerson("Joe", "Dow", 123) + } + "test tuple compatibility" in { + type T1 = (String, Int) + type T2 = String *: Int *: EmptyTuple + summon[T1 =:= T2] + val t1: T1 = ("", 0) + val t2: T2 = t1 + val t11: T1 = t2 + t1._2 mustEqual t2._2 + } + "test tuple quat" in { + val q1 = quatOf[(String, Int)] + type StringInt = String *: Int *: EmptyTuple + val q2 = quatOf[StringInt] + val q3 = quatOf[TupleConverter[StringInt]] + q1 mustEqual q3 + q1 mustNot be(q2) + } + "test tuple entity" in { + val IdentT = Ident("t", quatOf[(String, Int)]) + val Tuple2Quat = quatOf[(String, Int)].probit + type PersonT = (String, Int) + inline def q = quote { query[PersonT].map(t => (t._1, t._2)) } + q.ast mustEqual Map(Entity("Tuple2", List(), `Tuple2Quat`), IdentT, ast.Tuple(List(Property(IdentT, "_1"),Property(IdentT, "_2")))) + val result = ctx.run(q) + + val tupleRow = Row("_1" -> "Joe", "_2" -> 123) + result.extractor(tupleRow, s) mustEqual ("Joe", 123) + } + } +} diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/JointSchema.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/JointSchema.scala index 1a6158c..bd6b38e 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/JointSchema.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/JointSchema.scala @@ -6,26 +6,26 @@ import ru.primetalk.typed.ontology.simple.meta.ForeignKeyId0 case class JointSchema[S1 <: RecordSchema, S2 <: RecordSchema](schema1: S1, schema2: S2)( val joinSchema: RecordSchema.Concat[S1, S2] -): - transparent inline def concatValues( - inline d1: schema1.Values, - inline d2: schema2.Values - ): joinSchema.Values = - (d1 ++ d2).asInstanceOf[joinSchema.Values] +) +// transparent inline def concatValues( +// inline d1: schema1.Values, +// inline d2: schema2.Values 
+// ): joinSchema.Values = +// (d1 ++ d2).asInstanceOf[joinSchema.Values] - transparent inline def leftInnerJoin[FK <: ForeignKeyId0]( - inline fk: FK - )(data1: List[schema1.Values], data2: List[schema2.Values]): List[joinSchema.Values] = - for - el1 <- data1 - el2 <- data2 - if schema1.get(fk.left)(el1) == schema2.get(fk.right)(el2) - yield concatValues(el1, el2) +// transparent inline def leftInnerJoin[FK <: ForeignKeyId0]( +// inline fk: FK +// )(data1: List[schema1.Values], data2: List[schema2.Values]): List[joinSchema.Values] = +// for +// el1 <- data1 +// el2 <- data2 +// if schema1.get(fk.left)(el1) == schema2.get(fk.right)(el2) +// yield concatValues(el1, el2) -object JointSchema: - transparent inline def join[ - S1 <: RecordSchema, - S2 <: RecordSchema - ](inline schema1: S1, inline schema2: S2) = - val joinSchema = schema1.concat[S2](schema2) - JointSchema(schema1, schema2)(joinSchema) +// object JointSchema: +// transparent inline def join[ +// S1 <: RecordSchema, +// S2 <: RecordSchema +// ](inline schema1: S1, inline schema2: S2) = +// val joinSchema = schema1.concat[S2](schema2) +// JointSchema(schema1, schema2)(joinSchema) diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/RelationList.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/RelationList.scala index 6b42640..f41e02c 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/RelationList.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/listrelation/RelationList.scala @@ -10,67 +10,81 @@ import scala.quoted.* import scala.reflect.ClassTag import scala.compiletime.ops.int.S import scala.compiletime.constValue -import ru.primetalk.typed.ontology.simple.meta.RecordSchema import ru.primetalk.typed.ontology.simple.meta.ForeignKeyId0 +import ru.primetalk.typed.ontology.simple.meta.RecordSchema +import 
ru.primetalk.typed.ontology.simple.meta.SchemaValueType +import ru.primetalk.typed.ontology.simple.meta.Projector +import ru.primetalk.typed.ontology.simple.meta.RecordSchemaValueType /** A simple version of relation that doesn't depend on cats and keep data in a List. */ trait RelationList: type Schema <: RecordSchema + val schema: Schema + val svt: SchemaValueType.Aux1[Schema] // = summon[SchemaValueType[schema.type]] - type Values = schema.Values + type Row = svt.Value + type Values = Row val rows: List[Values] sealed trait WithFk: type FK <: ForeignKeyId0 val fk: FK - transparent inline def join[R2 <: RelationList](r2: R2) = - val jointSchema: JointSchema[schema.type, r2.schema.type] = JointSchema - .join[schema.type, r2.schema.type](schema, r2.schema) - jointSchema - .leftInnerJoin[FK](fk)(rows, r2.rows) + // transparent inline def join[R2 <: RelationList](r2: R2) = + // val jointSchema: JointSchema[schema.type, r2.schema.type] = JointSchema + // .join[schema.type, r2.schema.type](schema, r2.schema) + // jointSchema + // .leftInnerJoin[FK](fk)(rows, r2.rows) transparent inline def withFk[FK <: ForeignKeyId0](fk1: FK) = new WithFk { type FK = fk1.type val fk = fk1 } - transparent inline def projection[S2 <: RecordSchema](s2: S2) = - val f = s2.projectorFrom(schema) - val v = rows.map(f) + transparent inline def projection[S2 <: RecordSchema, VS2](s2: S2)(using + prj: Projector[Schema, Row, S2, VS2], + ev: this.Row =:= prj.from.Value, + ev2: prj.to.type =:= SchemaValueType.Aux1[S2] + ) = + val svtS2: SchemaValueType.Aux1[S2] = ev2(prj.to) + val v = rows.map(v => prj.apply(v)) new RelationList { - type Schema = s2.type - val schema = s2 - val rows = v + type Schema = S2 + val schema = s2 + val svt: SchemaValueType.Aux1[S2] = svtS2 + val rows = v.asInstanceOf[List[Values]] } object RelationList: - transparent inline def apply[S <: RecordSchema](s: S)(inline data: List[s.Values]) = + transparent inline def apply[S <: RecordSchema]( + s: S + )(using svt: 
SchemaValueType.Aux1[S])(inline data: List[svt.Value]) = new RelationList { - type Schema = s.type - val schema = s - val rows = data + type Schema = S + val schema = s + val svt: SchemaValueType.Aux1[S] = summon[SchemaValueType.Aux1[S]] + val rows = data.asInstanceOf[List[Row]] } import RecordSchema.Concat -transparent inline def fullInnerJoin[T1 <: RelationList, T2 <: RelationList, FK <: ForeignKeyId0]( - table1: T1, - table2: T2, - inline fk: FK -): List[Tuple.Concat[table1.Values, table2.Values]] = - for - row1 <- table1.rows - row2 <- table2.rows - if table1.schema.get(fk.left)(row1) == table2.schema.get(fk.right)(row2) - yield row1 ++ row2 +// transparent inline def fullInnerJoin[T1 <: RelationList, T2 <: RelationList, FK <: ForeignKeyId0]( +// table1: T1, +// table2: T2, +// inline fk: FK +// ): List[Tuple.Concat[table1.Values, table2.Values]] = +// for +// row1 <- table1.rows +// row2 <- table2.rows +// if table1.schema.get(fk.left)(row1) == table2.schema.get(fk.right)(row2) +// yield row1 ++ row2 -transparent inline def crossProduct[T1 <: RelationList, T2 <: RelationList]( - table1: T1, - table2: T2 -): List[Tuple.Concat[table1.Values, table2.Values]] = - for - row1 <- table1.rows - row2 <- table2.rows - yield row1 ++ row2 +// transparent inline def crossProduct[T1 <: RelationList, T2 <: RelationList]( +// table1: T1, +// table2: T2 +// ): List[Tuple.Concat[table1.Values, table2.Values]] = +// for +// row1 <- table1.rows +// row2 <- table2.rows +// yield row1 ++ row2 diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Projector.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Projector.scala index e386074..1074055 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Projector.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Projector.scala @@ -1,12 +1,20 @@ package 
ru.primetalk.typed.ontology.simple.meta -trait Projector[From <: SchemaLike, To <: SchemaLike]: - val from: SchemaValueType.Aux1[From] - val to: SchemaValueType.Aux1[To] - def apply(v: from.Value): to.Value +trait PropertyProjector[From <: RecordSchema, VFrom, P <: SimplePropertyId[?, VP], VP]: + val from: SchemaValueType[From, VFrom] + val rpvt: RecordPropertyValueType[P, VP] + type Value = VP + def apply(v: VFrom): VP -trait Concatenator[A <: RecordSchema, B <: RecordSchema]: - val aSvt: RecordSchemaValueType[A] - val bSvt: RecordSchemaValueType[B] - val abSvt: RecordSchemaValueType[RecordSchema.Concat[A, B]] - def apply(a: aSvt.Value, b: bSvt.Value): abSvt.Value +trait Projector[From <: SchemaLike, VFrom, To <: SchemaLike, VTo]: + val from: SchemaValueType[From, VFrom] + val to: SchemaValueType[To, VTo] + def apply(v: VFrom): VTo + +trait Concatenator[A <: RecordSchema, VA, B <: RecordSchema, VB, VAB]: + val aSvt: SchemaValueType[A, VA] + val bSvt: SchemaValueType[B, VB] + type Schema = RecordSchema.Concat[A, B] + def schemaConcat(a: A, b: B): Schema // r1.schema.appendOtherSchema(schema) + val abSvt: SchemaValueType[Schema, VAB] + def apply(a: VA, b: VB): VAB diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/RecordProperty.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/RecordProperty.scala index 8bf7aff..ae5a844 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/RecordProperty.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/RecordProperty.scala @@ -8,7 +8,7 @@ sealed trait RecordProperty0: // Record phantom type type R // Property schema - type P <: SchemaLike + type Schema <: SchemaLike val name: String @@ -19,7 +19,7 @@ sealed trait RecordProperty[A] extends RecordProperty0: /** Metainformation about property with a known schema. 
*/ abstract class SchemaBasedPropertyId[A, S <: SchemaLike](name1: String, val schema: S) extends RecordProperty[A]: - type P = S + type Schema = schema.type val name: String = name1 @@ -32,12 +32,13 @@ abstract class SchemaBasedPropertyId[A, S <: SchemaLike](name1: String, val sche /** Metainformation about property. Contains unique name (within the type) and type of the value. * Might contain other metainformation about property, like Schema. */ -abstract class SimplePropertyId[A, B: ClassTag](name1: String) extends SchemaBasedPropertyId[A, ScalarSchema1[B]](name1, summon[ScalarSchema1[B]]) +abstract class SimplePropertyId[A, B: ClassTag](name1: String)(using schema1: ScalarSchema1[B]) + extends SchemaBasedPropertyId[A, ScalarSchema1[B]](name1, schema1: ScalarSchema1[B]) -object RecordProperty0: - type PropertyValueType[A] = A match - case SimplePropertyId[_, p] => p - case _ => Nothing +// object RecordProperty0: +// type PropertyValueType[A] = A match +// case SimplePropertyId[_, p] => p +// case _ => Nothing trait PropertiesBuilder extends RecordSchemaBuilderBase: transparent inline def property[T: ClassTag](inline name: String) = diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Schema.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Schema.scala index 2ed8865..0a4aa3b 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Schema.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/Schema.scala @@ -28,17 +28,17 @@ sealed trait SchemaLike: sealed trait ScalarSchema extends SchemaLike +/** Schema for simple Scala types. 
+ */ sealed trait ScalarSchema1[T] extends ScalarSchema -object ScalarSchema: - val BooleanScalarSchema = summon[ScalarSchema1[Boolean]] - val IntScalarSchema = summon[ScalarSchema1[Int]] - val StringScalarSchema = summon[ScalarSchema1[String]] - object ScalarSchema1: - given [T: ClassTag]: ScalarSchema1[T] = + transparent inline given [T: ClassTag]: ScalarSchema1[T] = new ScalarSchema1[T]: def tpeRepr: String = summon[ClassTag[T]].runtimeClass.getSimpleName + val BooleanScalarSchema = summon[ScalarSchema1[Boolean]] + val IntScalarSchema = summon[ScalarSchema1[Int]] + val StringScalarSchema = summon[ScalarSchema1[String]] sealed trait TupleSchema extends SchemaLike: type Schemas <: Tuple @@ -48,8 +48,11 @@ sealed trait TupleSchema extends SchemaLike: case object EmptyTupleSchema extends TupleSchema: type Schemas = EmptyTuple val schemas: Schemas = EmptyTuple + +sealed trait NonEmptyTupleSchema0 extends TupleSchema + final case class NonEmptyTupleSchema[HS <: SchemaLike, TS <: TupleSchema](h: HS, t: TS) - extends TupleSchema: + extends NonEmptyTupleSchema0: type Schemas = h.type *: t.Schemas val schemas: Schemas = h *: t.schemas @@ -63,17 +66,19 @@ sealed trait RecordSchema extends SchemaLike: type Type = self.type type Properties <: Tuple + val properties: Properties def tpeRepr: String = properties.toIArray.mkString(", ") - type PropertySet = Tuple.Union[Properties] & RecordProperty0 + type PropertySet = RecordProperty0 & Tuple.Fold[Properties, RecordProperty0, [x, y] =>> x & y] /** A type function that applies a given type function to each property. 
*/ type PropertiesMap[F[_]] = Tuple.Map[Properties, F] type IndexOfProp[P <: RecordProperty0] = RecordSchema.IndexOfTypeInTuple[Properties, P] + transparent inline def indexOfProp[P2 <: RecordProperty0](inline p2: P2): IndexOfProp[P2] = RecordSchema.indexOfProp(this, p2) @@ -83,17 +88,23 @@ sealed trait RecordSchema extends SchemaLike: S2 match case EmptySchema => EmptyTuple case SchemaCons[p, s] => IndexOfProp[p] *: IndicesOfProps[s] + transparent inline def indicesOfProps[S2 <: RecordSchema](s2: S2): IndicesOfProps[s2.type] = constValueTuple[IndicesOfProps[s2.type]] + // type Concat1[Y <: RecordSchema] <: RecordSchema = + // this.type match + // case EmptySchema => Y + // case SchemaCons[x1, xs1] => SchemaCons[x1, Concat[xs1, Y]] + /** Concatenates properties of another schema. */ // transparent inline def concat[S2 <: RecordSchema, This >: this.type <: RecordSchema](inline schema2: S2): RecordSchema.Concat[This, schema2.type] = - inline def concat[S2 <: RecordSchema](inline schema2: S2): RecordSchema.Concat[this.type, S2] = - inline this match - case _: EmptySchema => - schema2 - case sc: SchemaCons[p, s] => - sc.p #: sc.schema.concat(schema2) + // inline def concat[S2 <: RecordSchema](inline schema2: S2): RecordSchema.Concat[this.type, S2] = + // inline this match + // case _: EmptySchema => + // schema2 + // case sc: SchemaCons[p, s] => + // sc.p #: sc.schema.concat(schema2) /** Type of the concatenation of two schemas. */ type PrependOtherSchema[S1 <: RecordSchema] <: RecordSchema = @@ -115,16 +126,23 @@ sealed trait RecordSchema extends SchemaLike: /** Type of the concatenation of two schemas. 
*/ type AppendOtherSchema[S2 <: RecordSchema] <: RecordSchema - transparent inline def appendOtherSchema[S2 <: RecordSchema](inline s2: S2): AppendOtherSchema[S2] + transparent inline def appendOtherSchema[S2 <: RecordSchema]( + s2: S2 + ): RecordSchema.Concat[Type, S2] @targetName("SchemaCons") - inline def #:[P <: RecordProperty0, This >: this.type <: RecordSchema](p: P): P #: This = - SchemaCons[P, This](p, this) + inline def #:[P <: RecordProperty0](p: P): P #: Type = + SchemaCons[P, Type](p, this) + + transparent inline def ##:[Other <: RecordSchema, This >: this.type <: RecordSchema]( + inline other: Other + ) = + appendOtherSchema(other) transparent inline def replace[P1 <: RecordProperty0, P2 <: RecordProperty0]( inline p1: P1, inline p2: P2 - ): RecordSchema + ): RecordSchema.Replace[P1, P2, this.type] /** Replaces properties of the same type. A bit more restricted version of replace. */ transparent inline def rename[T, P1 <: RecordProperty[T], P2 <: RecordProperty[T]]( @@ -135,110 +153,11 @@ sealed trait RecordSchema extends SchemaLike: type Remove[P1 <: RecordProperty0] <: RecordSchema - transparent inline def remove[P1 <: RecordProperty0](inline p1: P1): Remove[P1] - - // The subsequent elements might eventually be moved from schema. - // Currently it doesn't seem to work outside of this trait... - - /** Simple tuple representing an instance of this schema. */ - type Values = PropertiesMap[RecordProperty0.PropertyValueType] - - // Higher order operation on each value. - // Could be used to construct more complex data structures. 
For instance, tuple of options, or tuple of Either[Error, _] - type ValuesMap[H[_]] = Tuple.Map[Values, H] - - def get[P2](p2: P2)(v: Values): Option[RecordProperty0.PropertyValueType[p2.type]] - - def convertToMap(v: Values, m: Map[String, Any] = Map()): Map[String, Any] - - type ValueAt[I] = - I match - case Int => - RecordSchema.ValueAt[this.type, I] - case _ => - Nothing - transparent inline def getByIndex[I <: Int](inline i: I): Values => ValueAt[I] = v => - RecordSchema.valueAt[this.type, I](this, i)(v) - - def getByIndexRuntime(i: Int)(v: Values): Any = - scala.runtime.Tuples.apply(v.asInstanceOf[NonEmptyTuple], i) - - transparent inline def concatValues[S2 <: RecordSchema](schema2: S2)( - schema3: RecordSchema.Concat[this.type, S2] - ): (Values, schema2.Values) => schema3.Values = - (v1, v2) => (v1 ++ v2).asInstanceOf[schema3.Values] - - transparent inline def prependValues[S1 <: RecordSchema](schema1: S1)( - schema3: PrependOtherSchema[S1] - ): (schema1.Values, Values) => schema3.Values = - (v1, v2) => (v1 ++ v2).asInstanceOf[schema3.Values] - - transparent inline def appendValues[S2 <: RecordSchema](schema2: S2)( - schema3: AppendOtherSchema[S2] - ): (Values, schema2.Values) => schema3.Values = - (v1, v2) => (v1 ++ v2).asInstanceOf[schema3.Values] - - type PropertyGetter[P <: RecordProperty0] = - RecordSchema.IsPropertyInSchema[P, this.type] match - case true => - Values => RecordProperty0.PropertyValueType[P] - case false => - Values => Nothing - - transparent inline def propertyGetter[This >: this.type <: RecordSchema, P <: RecordProperty0]( - p2: P - ): RecordSchema.PropertyGetter[Values, p2.type] - - type PropertyGetter2[P <: RecordProperty0] <: Values => RecordProperty0.PropertyValueType[P] = - this.type match - case EmptySchema => Nothing - case SchemaCons[`P`, _] => Values => RecordProperty0.PropertyValueType[P] - case SchemaCons[_, s] => Values => RecordProperty0.PropertyValueType[P] - - // transparent inline def propertyGetter2[P <: 
RecordProperty0](p: P): PropertyGetter2[p.type] = - // inline this match - // case _ : EmptySchema => scala.compiletime.error(s"property $p not found") - // case sc : SchemaCons[`P`, _] => (v: Values) => sc.v.head.asInstanceOf[RecordProperty0.PropertyValueType[p.type]] - // case sc : SchemaCons[_, s] => (v: Values) => v.tail.asInstanceOf[s.Values] - - type PropertyUpdater[P <: RecordProperty0] = - Values => RecordProperty0.PropertyValueType[P] => Values - transparent inline def propertyUpdater[This >: this.type <: RecordSchema, P <: RecordProperty0]( - inline p: P - ): PropertyUpdater[P] - - transparent inline def projectorFrom[S1 <: RecordSchema](s1: S1): s1.Values => Values - - transparent inline def projection[S2 <: RecordSchema]( - inline schema2: S2 - ): Any = // Values => S2#Values = - schema2.projectorFrom(this) - - type OptionValues = ValuesMap[Option] - - transparent inline def transformOption: OptionValues => Option[Values] - - type EitherValues[E] = ValuesMap[[V] =>> Either[E, V]] - - transparent inline def transformEither[E]: EitherValues[E] => Either[List[E], Values] - - transparent inline def fkPredicate[FK <: ForeignKeyId0](fk: FK): Values => Boolean = - val l = propertyGetter(fk.left) - val r = propertyGetter(fk.right) - row => l(row) == r(row) - - extension (values: Values) - transparent inline def apply[P <: RecordProperty0](p: P) = - self.propertyGetter(p)(values) - transparent inline def updated[P <: RecordProperty0](inline p: P)( - inline v: RecordProperty0.PropertyValueType[P] - ) = - self.propertyUpdater(p)(values)(v) + transparent inline def remove[P1 <: RecordProperty0](p1: P1): Remove[P1] type EmptySchema = EmptySchema.type case object EmptySchema extends RecordSchema: - import RecordSchema._ type R = Nothing @@ -249,160 +168,80 @@ case object EmptySchema extends RecordSchema: def unapply(e: EmptySchema): true = true type AppendOtherSchema[S2 <: RecordSchema] = S2 + + inline def concat[S2 <: RecordSchema](inline schema2: S2): S2 = + 
schema2 + transparent inline def appendOtherSchema[S2 <: RecordSchema]( - inline s2: S2 - ): AppendOtherSchema[S2] = + s2: S2 + ): RecordSchema.Concat[Type, S2] = s2 transparent inline def replace[P1 <: RecordProperty0, P2 <: RecordProperty0]( inline p1: P1, inline p2: P2 - ): RecordSchema = + ): RecordSchema.Replace[P1, P2, this.type] = EmptySchema type Remove[P1 <: RecordProperty0] = EmptySchema - transparent inline def remove[P1 <: RecordProperty0](inline p1: P1): Remove[P1] = EmptySchema - - def get[P2](p2: P2)(v: Values): Option[RecordProperty0.PropertyValueType[p2.type]] = - None - def convertToMap(v: Values, m: Map[String, Any] = Map()): Map[String, Any] = - m - - transparent inline def propertyGetter[This >: this.type <: RecordSchema, P <: RecordProperty0]( - p2: P - ): RecordSchema.PropertyGetter[Values, p2.type] = - sys.error(s"There is no property getter for $p2 in empty schema") - // ${propertyGetterImpl[This, this.Values, p2.type]('this, 'p2)} - // transparent inline def propertyGetter[ - // This >: this.type <: RecordSchema, - // P<: RecordProperty0](p: P): PropertyGetter[p.type] = - // sys.error(s"There is no property getter for $p in empty schema") - transparent inline def propertyUpdater[This >: this.type <: RecordSchema, P <: RecordProperty0]( - inline p: P - ): PropertyUpdater[P] = - sys.error(s"There is no property updater for $p in empty schema") - - transparent inline def projectorFrom[S1 <: RecordSchema](s1: S1): s1.Values => Values = - _ => EmptyTuple - transparent inline def transformOption: OptionValues => Option[Values] = - _ => Some(EmptyTuple) - - transparent inline def transformEither[E]: EitherValues[E] => Either[List[E], Values] = - _ => Right(EmptyTuple) + transparent inline def remove[P1 <: RecordProperty0](p1: P1): Remove[P1] = EmptySchema sealed trait NonEmptySchema extends RecordSchema: type Properties <: NonEmptyTuple - type ValuesElem[I <: Int] = Tuple.Elem[Values, I] - transparent inline def valueAt[N <: Int](n: N): Values => 
Tuple.Elem[Values, N] = - v => v.asInstanceOf[NonEmptyTuple].apply(n).asInstanceOf[Tuple.Elem[Values, N]] final case class SchemaCons[P <: RecordProperty0, S <: RecordSchema](p: P, schema: S) extends NonEmptySchema: import RecordSchema._ + require(p != null, "Property identifier should not be null") type ParentSchemaOrNothing = schema.type type Properties = p.type *: schema.Properties - type PValue = RecordProperty0.PropertyValueType[Tuple.Head[Properties]] // p.type val properties: Properties = p *: schema.properties def parentSchemaOrNothing: ParentSchemaOrNothing = schema - def get[P2](p2: P2)(v: Values): Option[RecordProperty0.PropertyValueType[p2.type]] = - val head *: (tail: schema.Values) = v - if p2 == p then Some(head.asInstanceOf[RecordProperty0.PropertyValueType[p2.type]]) - else schema.get(p2)(tail) - - transparent inline def get3(p1: p.type)(v: Values): PValue = - v.head.asInstanceOf[PValue] - - def convertToMap(v: Values, m: Map[String, Any] = Map()): Map[String, Any] = - val head *: (tail: schema.Values) = v - schema.convertToMap(tail, m.updated(p.name, head)) def unapply[This >: this.type <: SchemaCons[P, S]]: Unapply[This] = this match // case _: EmptySchema => None case _: SchemaCons[p, s] => Some((p, schema)) - transparent inline def propertyGetter[This >: this.type <: RecordSchema, P <: RecordProperty0]( - p2: P - ): RecordSchema.PropertyGetter[Values, p2.type] = - ${ propertyGetterImpl[This, this.Values, p2.type]('this, 'p2) } - - // val i: Int = indexOfProp(p2) - // (values: Values) => - // val res: Any = values.asInstanceOf[NonEmptyTuple].apply(i) - // res.asInstanceOf[RecordProperty0.PropertyValueType[p2.type]]////Tuple.Elem[Values, IndexOfProp[p.type]]] - transparent inline def propertyUpdater[This >: this.type <: RecordSchema, P <: RecordProperty0]( - inline p: P - ): PropertyUpdater[P] = - val i = indexOfProp(p) - values => - newPropertyValue => - val arr = Tuples.toIArray(values) - val updatedArray = arr.updated(i, 
newPropertyValue.asInstanceOf[Object]) - Tuples.fromIArray(updatedArray).asInstanceOf[Values] - - // TODO: construct a tuple expression that will return result at once, without the need to reconstruct multiple tuples along the way. - transparent inline def projectorFrom[S1 <: RecordSchema](s1: S1): s1.Values => Values = - val fp = - s1.propertyGetter[S1, p.type](p) // : s1.Values => RecordProperty0.PropertyValueType[p.type] - val fschema: s1.Values => schema.Values = schema.projectorFrom(s1) - (v: s1.Values) => fp(v) *: fschema(v) - - type AppendOtherSchema[S2 <: RecordSchema] = SchemaCons[P, schema.AppendOtherSchema[S2]] + + type AppendOtherSchema[S2 <: RecordSchema] = SchemaCons[p.type, schema.AppendOtherSchema[S2]] + transparent inline def appendOtherSchema[S2 <: RecordSchema]( - inline s2: S2 - ): AppendOtherSchema[S2] = - p #: schema.appendOtherSchema(s2) + s2: S2 + ): RecordSchema.Concat[Type, S2] = + val res = (p #: schema.appendOtherSchema(s2)) + res.asInstanceOf[RecordSchema.Concat[Type, S2]] + + inline def concat[S2 <: RecordSchema](schema2: S2): RecordSchema.Concat[Type, S2] = + appendOtherSchema(schema2) transparent inline def replace[P1 <: RecordProperty0, P2 <: RecordProperty0]( inline p1: P1, inline p2: P2 - ): RecordSchema = - inline p1 match - case `p` => p2 #: schema - case _ => p #: schema.replace(p1, p2) + ): RecordSchema.Replace[P1, P2, this.type] = + inline this match + case _: EmptySchema => EmptySchema + case v: SchemaCons[P1, s] => + v.copy(p = p2) + case v: SchemaCons[pt, s] => + v.copy(schema = schema.asInstanceOf[s].replace(p1, p2)) type Remove[P1 <: RecordProperty0] <: RecordSchema = P1 match case P => S case _ => SchemaCons[P, schema.Remove[P1]] - transparent inline def remove[P1 <: RecordProperty0](inline p1: P1): Remove[P1] = + transparent inline def remove[P1 <: RecordProperty0](p1: P1): Remove[P1] = inline p1 match case _: P => schema - case _ => SchemaCons(p, schema.remove(p1)) - - transparent inline def transformOption: 
OptionValues => Option[Values] = - val schemaTransformOption: schema.OptionValues => Option[schema.Values] = schema.transformOption - ov => - ov match - case None *: t => None - case Some(v: PValue) *: (t: schema.OptionValues) => - val tr = schemaTransformOption(t) - tr.map(v *: _) - case _ => - ??? - - transparent inline def transformEither[E]: EitherValues[E] => Either[List[E], Values] = - val schemaTransformEither: schema.EitherValues[E] => Either[List[E], schema.Values] = - schema.transformEither[E] - ev => - ev match - case v1 *: (t: schema.EitherValues[E]) => - val tr = schemaTransformEither(t) - v1 match - case Left(e: E) => - tr match - case Left(lst) => Left(e :: lst) - case Right(_) => Left(e :: Nil) - case Right(v: PValue) => - tr.map(v *: _) - - case _: EmptyTuple => - ??? + case _ => + println(p) + SchemaCons[P, schema.Remove[P1]](p, schema.remove(p1)) infix type #:[P <: RecordProperty0, S <: RecordSchema] = SchemaCons[P, S] +infix type ##:[S1 <: RecordSchema, S <: RecordSchema] = RecordSchema.Concat[S1, S] object RecordSchema: type SimpleProperty[R0, N, S <: SchemaLike] = RecordProperty0 { @@ -443,19 +282,6 @@ object RecordSchema: case 0 => p case S[n] => PropAt[s, n] - type ValueAt[X <: RecordSchema, N <: Int] <: Any = X match - case EmptySchema => Nothing - case SchemaCons[p, s] => - N match - case 0 => RecordProperty0.PropertyValueType[p] - case S[n] => ValueAt[s, n] - - transparent inline def valueAt[X <: RecordSchema, I <: Int]( - inline schema: X, - inline i: I - ): Any => ValueAt[X, I] = v => - scala.runtime.Tuples.apply(v.asInstanceOf[NonEmptyTuple], i).asInstanceOf[ValueAt[X, I]] - transparent inline def indexOfProp[S1 <: RecordSchema, P <: RecordProperty0]( schema: S1, inline property: P @@ -488,8 +314,8 @@ object RecordSchema: (summonInline[ValueOf[p]].value #: constSchema[s]).asInstanceOf[S] transparent inline infix def prepend[S <: RecordSchema, P <: RecordProperty0]( - inline p: P, - inline schema: S + p: P, + schema: S ): SchemaCons[P, 
S] = SchemaCons[P, S](p, schema) @@ -499,28 +325,18 @@ object RecordSchema: case SchemaCons[P1, st] => st case SchemaCons[pt, st] => SchemaCons[pt, Remove[P1, st]] + type Replace[P1 <: RecordProperty0, P2 <: RecordProperty0, S <: RecordSchema] <: RecordSchema = + S match + case EmptySchema => EmptySchema.type + case SchemaCons[P1, st] => SchemaCons[P2, st] + case SchemaCons[pt, st] => SchemaCons[pt, Replace[P1, P2, st]] + type IsPropertyInSchema[P <: RecordProperty0, Schema <: RecordSchema] <: Boolean = Schema match case EmptySchema => false case SchemaCons[`P`, _] => true case SchemaCons[_, s] => IsPropertyInSchema[P, s] - type PropertyGetter[Values <: Tuple, P <: RecordProperty0] = - Values => RecordProperty0.PropertyValueType[P] - // Schema match - // case EmptySchema => EmptyTuple => Nothing - // case SchemaCons[`P`, s] => `P` *: s#Values => RecordProperty0.PropertyValueType[P] - // case SchemaCons[_, s] => PropertyGetter[s, P] - - // // DOESN'T WORK - // transparent inline def removeDoesntWork - // [P1 <: RecordProperty0, S <: RecordSchema] - // (inline p1: P1, inline schema: S): Remove[P1, S] = - // inline schema match - // case EmptySchema : EmptySchema => EmptySchema - // case SchemaCons(`p1`, s): SchemaCons[P1, st] => s - // case SchemaCons(p, s): SchemaCons[pt, st] => SchemaCons(p, removeDoesntWork(p1, s)) - type Reverse0[S <: RecordSchema, Accum <: RecordSchema] <: RecordSchema = S match case EmptySchema => Accum case SchemaCons[p, s] => @@ -554,30 +370,3 @@ transparent inline def isPropertyInSchema[P <: RecordProperty0, Schema <: Record case _: EmptySchema => false case _: SchemaCons[`P`, _] => true case sc: SchemaCons[_, s] => isPropertyInSchema[P, s](p, sc.schema) - -// def isPropertyInSchemaImpl[P <: RecordProperty0, Schema <: RecordSchema]( -// schema: Expr[Schema], p2: Expr[P] -// )(using pt: Type[P], schemat: Type[Schema])(using Quotes): Expr[RecordSchema.IsPropertyInSchema[P, Schema]] = -// import quotes.reflect.* -// '{ -// ${schemat match -// 
case -// } -// } -def propertyGetterImpl[Schema <: RecordSchema, Values <: Tuple, P <: RecordProperty0]( - schema: Expr[Schema], - p2: Expr[P] -)(using pt: Type[P], schemat: Type[Schema], values: Type[Values])(using - Quotes -): Expr[RecordSchema.PropertyGetter[Values, P]] = - '{ - val propName = s"prop:${($p2)}" - if isPropertyInSchema($p2, $schema) then - val i: Int = $schema.indexOfProp($p2).asInstanceOf[Int] - val f = (v: Values) => - (Tuples - .apply(v.asInstanceOf[NonEmptyTuple], i) - .asInstanceOf[RecordProperty0.PropertyValueType[P]]) - f.asInstanceOf[RecordSchema.PropertyGetter[Values, P]] - else sys.error(s"property $propName is not in schema ${${ schema }}") - } diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaBuilders.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaBuilders.scala index 7c92a37..7656f27 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaBuilders.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaBuilders.scala @@ -5,6 +5,7 @@ import scala.quoted.Expr import scala.quoted.Type import scala.quoted.Quotes import scala.quoted.Varargs +import ru.primetalk.typed.ontology.utils.objectName trait RecordSchemaBuilderBase: type RecordType @@ -33,13 +34,13 @@ def fieldsReverseImpl[S <: RecordSchema]( seq match case Seq() => schemaExpr - case Seq('{ $a: at }, as*) => // здесь важно сохранить тип, чтобы + case Seq('{type at; $a: `at` }, as*) => // здесь важно сохранить тип, чтобы fieldsReverseImpl(Varargs(as), '{ RecordSchema.prepend(${ a }, ${ schemaExpr }) }) -def fieldsImpl[S <: RecordSchema]( +def fieldsImpl[S <: RecordSchema: Type]( propertyList: Expr[Seq[RecordProperty0]], schemaExpr: Expr[S] -)(using Type[S])(using Quotes): Expr[RecordSchema] = +)(using Quotes): Expr[RecordSchema] = propertyList match case Varargs(as) => 
fieldsReverseImpl(Varargs(as.reverse), schemaExpr) @@ -114,4 +115,5 @@ abstract class TableBuilder extends PropertiesBuilder with ForeignKeyBuilder wit type TableSchema <: RecordSchema transparent inline def infer[S <: RecordSchema]: S = RecordSchema.constSchema[S] val tableSchema: TableSchema - type Row = tableSchema.Values + + val tableName = objectName(this) diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaProvider.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaProvider.scala index 5c20c12..b529a71 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaProvider.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaProvider.scala @@ -18,23 +18,23 @@ object TupleSchemaProvider: def apply[T](using s: TupleSchemaProvider[T]): TupleSchemaProvider[T] = s object SchemaProvider { - def apply[T](using s: SchemaProvider[T]): SchemaProvider[T] = s + def apply[T](using s: SchemaProvider[T]): s.type = s given TupleSchemaProvider[EmptyTuple] = new { type Schema = EmptyTupleSchema.type val schema: Schema = EmptyTupleSchema } given SchemaProvider[Boolean] = new { - type Schema = ScalarSchema.BooleanScalarSchema.type - val schema = ScalarSchema.BooleanScalarSchema + type Schema = ScalarSchema1.BooleanScalarSchema.type + val schema = ScalarSchema1.BooleanScalarSchema } given SchemaProvider[Int] = new { - type Schema = ScalarSchema.IntScalarSchema.type - val schema = ScalarSchema.IntScalarSchema + type Schema = ScalarSchema1.IntScalarSchema.type + val schema = ScalarSchema1.IntScalarSchema } given SchemaProvider[String] = new { - type Schema = ScalarSchema.StringScalarSchema.type - val schema = ScalarSchema.StringScalarSchema + type Schema = ScalarSchema1.StringScalarSchema.type + val schema = ScalarSchema1.StringScalarSchema } given [H: SchemaProvider, T <: Tuple: TupleSchemaProvider]: 
TupleSchemaProvider[H *: T] = @@ -44,7 +44,7 @@ object SchemaProvider { type Schema = NonEmptyTupleSchema[hs.Schema, ts.Schema] // SchemaProvider.apply[H].Schema val schema = NonEmptyTupleSchema[hs.Schema, ts.Schema](hs.schema, ts.schema) } - def derived[T <: Product](using + transparent inline def derived[T <: Product](using m: Mirror.ProductOf[T], elems: TupleSchemaProvider[m.MirroredElemTypes], caseClassMeta: CaseClassMeta[T] diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaValueType.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaValueType.scala index 85a0a49..9ebbcf4 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaValueType.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SchemaValueType.scala @@ -1,10 +1,59 @@ package ru.primetalk.typed.ontology.simple.meta -/** Type class that provides value type for the given schema. - * An instance of this class could be used to retrieve type representation for the schema. - */ -trait SchemaValueType[S <: SchemaLike]: - type Value +/** Тип-аннотация, позволяющий привязать к произвольному значению сведения о схеме этого значения. + */ +sealed trait WithSchema[S <: SchemaLike] -trait SchemaTupleValueType[S <: TupleSchema] extends SchemaValueType[S]: +/** Приклеиваем к типу значения его схему. */ +type #@[A, S <: SchemaLike] = A & WithSchema[S] + +extension [A](a: A) + def annotated[S <: SchemaLike]: A #@ S = + a.asInstanceOf[A #@ S] + +/** Type class that provides value type for the given schema. An instance of this class could be + * used to retrieve type representation for the schema. 
+ */ +class SchemaValueType[S <: SchemaLike, V]: + type Schema = S + type Value = V + type AValue = V #@ S + +object SchemaValueType: + def apply[S <: SchemaLike](using svt: SchemaValueType[S, ?]): svt.type = + svt + + /** Helper type to simplify SchemaValueType search. + */ + type Aux1[S <: SchemaLike] = SchemaValueType[S, ?] + + /** Constructs or deconstructs SchemaValueType using provided type parameters. + */ + type Aux[S <: SchemaLike, V] = SchemaValueType[S, V] + +/** Similar mechanism that isolates TupleSchema. We had to implement this auxiliary trait because + * Scala 3.4.0 wasn't able to disambiguate plain SchemaValueType of two types that have Value <: + * Tuple. We provide an implicit conversion though. + */ +trait TupleSchemaValueType[S <: TupleSchema]: + type Schema = S + type Value <: Tuple + +/** Similar mechanism that isolates TupleSchema. We had to implement this auxiliary trait because + * Scala 3.4.0 wasn't able to disambiguate plain SchemaValueType of two types that have Value <: + * Tuple. + */ +trait RecordSchemaValueType[S <: RecordSchema]: + type Schema = S type Value <: Tuple + +/** Type class that returns the type of property value. + */ +trait RecordPropertyValueType[P <: RecordProperty0, V]: + type Schema <: SchemaLike + type Value = V + +object RecordPropertyValueType: + def apply[P <: RecordProperty0](using rpvt: RecordPropertyValueType[P, ?]): rpvt.type = + rpvt + type Aux1[P <: RecordProperty0] = RecordPropertyValueType[P, ?] 
diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypes.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypes.scala index bb93c3e..ab6aec3 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypes.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypes.scala @@ -1,12 +1,258 @@ package ru.primetalk.typed.ontology.simple.meta -object SimpleTypes: - given [T]: SchemaValueType[ScalarSchema1[T]] = +import scala.annotation.targetName +import java.time.LocalDateTime +import scala.runtime.Tuples +import scala.quoted.Type + +/** Provides storage types for ScalarSchema1[T] */ +trait ScalarSimpleTypes: + /** We might have to explicitly limit the list of supported types to avoid ambiguity. Though, + * using low priority implicits might help. + */ + type ScalarTypes = AnyVal | String | BigInt | LocalDateTime // Int | String | Boolean | Double + transparent inline given scalarSchema1svt[ + T <: ScalarTypes, + S <: ScalarSchema1[T] + ]: SchemaValueType[S, T] = + new SchemaValueType[S, T] + +trait ConversionSimpleTypes: + transparent inline given tupleSchemaValueType[S <: TupleSchema](using + tsvt: TupleSchemaValueType[S] + ): SchemaValueType[S, tsvt.Value] = + new SchemaValueType[S, tsvt.Value] + + transparent inline given recordSchemaValueType[S <: RecordSchema](using + rsvt: RecordSchemaValueType[S] + ): SchemaValueType[S, rsvt.Value] = + new SchemaValueType[S, rsvt.Value] + +/** Provide storage types (tuples) for tuple schemas. 
+ */ +trait TupleSimpleTypes: + + transparent inline given emptyTupleSchemaSvt: TupleSchemaValueType[EmptyTupleSchema.type] = + new: + type Schema = EmptyTupleSchema.type + type Value = EmptyTuple + + transparent inline given nonEmptyTupleSchema[ + HS <: SchemaLike, + TS <: TupleSchema, + ResultS <: NonEmptyTupleSchema[HS, TS] + ](using + hs: SchemaValueType.Aux1[HS], + ts: TupleSchemaValueType[TS] + ): TupleSchemaValueType[ResultS] = + new: + type Schema = ResultS + type Value = hs.Value *: ts.Value + +/** Converts schema value type to RecordPropertyValueType for properties. */ +trait PropertySimpleTypes: + + // transparent inline given propertyValueType[B, P <: SimplePropertyId[?, B]](using + // vp: ValueOf[P], + // svt: SchemaValueType.Aux1[vp.value.Schema] + // ): RecordPropertyValueType[P, B] = + // new: + // val property = vp.value + // // type Value = svt.Value + + transparent inline given propertyValueType[B, P <: SimplePropertyId[?, B]](using vp: ValueOf[P]): RecordPropertyValueType[P, B] = + ${ SimpleTypesMacro.propertyValueType1Impl[B, P]('vp) } + + // transparent inline given propertyValueType2[B, P <: SimplePropertyId[?, B]]: RecordPropertyValueType[P] = + // new: + // type Value = B + +trait RecordSchemaSimpleTypes: + val a = 0 + // SchemaValueTypes + transparent inline given emptySchemaSVT: SchemaValueType[EmptySchema.type, EmptyTuple] = + new SchemaValueType[EmptySchema.type, EmptyTuple] + + transparent inline given emptySchemaRSVT: RecordSchemaValueType[EmptySchema.type] = new: - type Value = T - given SchemaTupleValueType[EmptyTupleSchema.type] = + type Schema = EmptySchema.type + type Value = EmptyTuple + + transparent inline given tuple1Schema[VP, P <: SimplePropertyId[?, VP]](using + svtp: RecordPropertyValueType[P, VP] + ): RecordSchemaValueType[SchemaCons[P, EmptySchema]] = + new RecordSchemaValueType[SchemaCons[P, EmptySchema]]: + type Value = Tuple1[VP] + + transparent inline given nonEmptySchema[ + VP, + P <: SimplePropertyId[?, VP], + S 
<: NonEmptySchema, + // RS <: P #: S + ](using + svtp: RecordPropertyValueType[P, VP], + svts: RecordSchemaValueType[S] + ): RecordSchemaValueType[P #: svts.Schema] = new: - type Value = EmptyTuple.type - given [HS <: SchemaLike, TS <: TupleSchema](using hs: SchemaValueType[HS], ts: SchemaTupleValueType[TS]): SchemaTupleValueType[NonEmptyTupleSchema[HS, TS]] = + type Schema = P #: svts.Schema + type Value = VP *: svts.Value + +// Projectors +trait ProjectorSimpleTypes extends RecordSchemaSimpleTypes: + + // Projectors for properties + transparent inline given propertyProjectorHead[ + VP, + P <: SimplePropertyId[?, VP], + S <: RecordSchema, + From <: P #: S, + VFrom <: Tuple, + ](using + rpvt: RecordPropertyValueType[P, VP], + svtp: SchemaValueType[rpvt.Schema, VP], + svtps: SchemaValueType[From, VFrom] + ): PropertyProjector[From, VFrom, P, VP] = + + new: + val from: SchemaValueType[From, VFrom] = svtps + val rpvt: RecordPropertyValueType[P, VP] = rpvt + type Value = VP + def apply(v: VFrom): VP = + v match + case h *: _ => + h.asInstanceOf[VP] + case _ => + ??? 
+ + transparent inline given propertyProjectorTail[ + VP, + P <: SimplePropertyId[?, VP], + VP2, + P2 <: SimplePropertyId[?, VP2], + S <: RecordSchema, + VS <: Tuple, + // From <: P2 #: S, + ](using + rpvt1: RecordPropertyValueType[P, VP], + propertyProjector: PropertyProjector[S, VS, P, VP], + svtps: SchemaValueType[P2 #: S, VP2 *: VS] + ): PropertyProjector[P2 #: S, VP2 *: VS, P, VP] = + new: + val from: SchemaValueType[P2 #: S, VP2 *: VS] = svtps + val rpvt: RecordPropertyValueType[P, VP] = rpvt1 + type Value = VP + + def apply(v: VP2 *: VS): VP = + v match + case _ *: (t: VS) => + propertyProjector(t) + + // Projectors for various schemas + transparent inline given emptySchemaProjector[From <: RecordSchema, VFrom](using + svt: SchemaValueType[From, VFrom] + ): Projector[From, VFrom, EmptySchema, EmptyTuple] = new: - type Value = hs.Value *: ts.Value + val from: SchemaValueType[From, VFrom] = svt + val to: SchemaValueType[EmptySchema.type, EmptyTuple] = + emptySchemaSVT + def apply(v: VFrom): EmptyTuple = + EmptyTuple + + transparent inline given someSchemaPlusPropertyProjector[ + From <: RecordSchema, + VFrom, + P <: SimplePropertyId[?, VP], + VP, + S <: RecordSchema, + VS <: Tuple, + ](using + rpvt: RecordPropertyValueType[P, VP], + existingSchemaProjector: Projector[From, VFrom, S, VS], + propertyProjector: PropertyProjector[From, VFrom, P, VP], + svtps: SchemaValueType[P #: S, VP *: VS] + ): Projector[From, VFrom, P #: S, VP *: VS] = + new: + val from: SchemaValueType[From, VFrom] = existingSchemaProjector.from + val to: SchemaValueType[P #: S, VP *: VS] = svtps + def apply(v: VFrom): VP *: VS = + (propertyProjector(v) *: existingSchemaProjector(v)) + + implicit class ValueOps[S <: RecordSchema, V](v: V)(using svtv: SchemaValueType[S, V]): + type Schema = S + @targetName("get") + def /[ + VP, + P <: SimplePropertyId[?, VP], + ](p: P)(using prj: PropertyProjector[S, V, P, VP]): VP = + prj(v) + +trait TableBuilderSimpleTypes: + final case class 
TableBuilderExtensionR[S <: RecordSchema]( + schema: S, + svt: RecordSchemaValueType[S] + ): + type Row = svt.Value + + implicit def tableBuilderExtension[T <: TableBuilder](t: T)(using + svt1: RecordSchemaValueType[t.TableSchema] + ): TableBuilderExtensionR[t.TableSchema] = + TableBuilderExtensionR[t.TableSchema](t.tableSchema, svt1) + +trait ConcatenatorSimpleTypes extends RecordSchemaSimpleTypes: + + transparent inline given emptyBConcatenator[B <: RecordSchema, VB](using + SchemaValueType[B, VB] + ): Concatenator[EmptySchema, EmptyTuple, B, VB, VB] = + new: + val aSvt = summon[SchemaValueType[EmptySchema, EmptyTuple]] + val bSvt = summon[SchemaValueType[B, VB]] + + type Schema = B + + def schemaConcat(a: EmptySchema, b: B): Schema = b + val abSvt: SchemaValueType[B, VB] = bSvt + + def apply(a: EmptyTuple, b: VB): VB = + b + + transparent inline given nonEmptyConcatenator[ + VP, + P <: SimplePropertyId[?, VP], + + S <: RecordSchema, + VS <: Tuple, + + B <: RecordSchema, + VB <: Tuple, + + VSB <: Tuple + ](using + svtS: SchemaValueType[S, VS], + svtA: SchemaValueType[P #: S, VP *: VS], + svtB: SchemaValueType[B, VB], + concatSB: Concatenator[S, VS, B, VB, VSB], + svt: SchemaValueType[P #: concatSB.Schema, VP *: VSB] + ): Concatenator[P #: S, VP *: VS, B, VB, VP *: VSB] = + new: + val aSvt = svtA + val bSvt = svtB + + type Schema = P #: concatSB.Schema + + def schemaConcat(a: P #: S, b: B): Schema = + a.appendOtherSchema(b) + + val abSvt: SchemaValueType[Schema, VP *: VSB] = svt + + def apply(a: VP *: VS, b: VB): VP *: VSB = + Tuples.concat(a, b).asInstanceOf[abSvt.Value] + +object SimpleTypes + extends ScalarSimpleTypes + with ConversionSimpleTypes + // with TupleSimpleTypes + with PropertySimpleTypes + with RecordSchemaSimpleTypes + with ProjectorSimpleTypes + with ConcatenatorSimpleTypes +//with TableBuilderSimpleTypes diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypesMacro.scala 
b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypesMacro.scala new file mode 100644 index 0000000..be108a3 --- /dev/null +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/meta/SimpleTypesMacro.scala @@ -0,0 +1,12 @@ +package ru.primetalk.typed.ontology.simple.meta + +import scala.quoted.{Expr, Type, Quotes} +import scala.compiletime.constValue + +object SimpleTypesMacro: + def propertyValueType1Impl[B: Type, P <: SimplePropertyId[?, B]: Type](vp: Expr[ValueOf[P]])(using Quotes): Expr[RecordPropertyValueType[P, B]] = + '{ + new RecordPropertyValueType[P, B]{ + type Schema = ScalarSchema1[B] + } + } diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/ExprClassicDsl.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/ExprClassicDsl.scala index 6f8dc86..3f39060 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/ExprClassicDsl.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/ExprClassicDsl.scala @@ -1,12 +1,16 @@ package ru.primetalk.typed.ontology.simple.relalg +import ru.primetalk.typed.ontology.simple.meta.{Projector, PropertyProjector} import ru.primetalk.typed.ontology.simple.meta.RecordSchema -import ru.primetalk.typed.ontology.simple.meta.RecordProperty0 +import ru.primetalk.typed.ontology.simple.meta.SchemaValueType +import ru.primetalk.typed.ontology.simple.meta.RecordSchemaValueType +import ru.primetalk.typed.ontology.simple.meta.SimplePropertyId trait ExprClassicDsl: type Schema <: RecordSchema val schema: Schema - type Row = schema.Values + type Row + val svt: SchemaValueType[Schema, Row] // DSL. 
It is part of a single relation to have intrinsic access to schema sealed trait RelExpr[T] @@ -15,10 +19,12 @@ trait ExprClassicDsl: case class Function2Expr[A, B, C](r1: RelExpr[A], r2: RelExpr[B], name: String, op: (A, B) => C) extends RelExpr[C] - inline def prop[P <: RecordProperty0](p: P): Getter[RecordProperty0.PropertyValueType[p.type]] = - Getter(p.name, schema.propertyGetter(p)) + inline def prop[VP, P <: SimplePropertyId[?, VP]](p: P)(using + prj: PropertyProjector[Schema, Row, p.type, VP] + ): Getter[VP] = + Getter(p.name, r => prj(r)) - inline def const[T](inline t: T): Getter[T] = Getter(s"$t", _ => t) + inline def const[T](t: T): Getter[T] = Getter(s"$t", _ => t) extension [T](r: RelExpr[T]) inline def ===(inline other: RelExpr[T]): Function2Expr[T, T, Boolean] = diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/Relation.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/Relation.scala index 67088db..b025933 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/Relation.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/Relation.scala @@ -11,29 +11,34 @@ import cats.kernel.Semigroup import cats.kernel.Order import scala.collection.immutable.SortedMap import cats.Applicative -import ru.primetalk.typed.ontology.simple.meta.ForeignKeyId0 -import ru.primetalk.typed.ontology.simple.meta.RecordSchema -import ru.primetalk.typed.ontology.simple.meta.RecordProperty -import ru.primetalk.typed.ontology.simple.meta.RecordProperty0 -import ru.primetalk.typed.ontology.simple.meta.TableBuilder +import ru.primetalk.typed.ontology.simple.meta.{#:, ##:, + Concatenator, ForeignKeyId0, Projector, RecordSchema, + RecordProperty, TableBuilder, SchemaValueType, + RecordSchemaValueType, EmptySchema} +import cats.syntax.flatMap.given +import cats.syntax.functor.given +import cats.syntax.foldable.given 
+import ru.primetalk.typed.ontology.simple.meta.SimplePropertyId +import scala.compiletime.summonInline /** Relation is a pair of schema and a collection of instances of that schema. V - is the collection * type (List, Stream[...]). */ -abstract class Relation[V[_]] extends ExprClassicDsl: +abstract class Relation[S <: RecordSchema, VS, V[_]](val schema: S)(using SchemaValueType[S, VS]) + extends ExprClassicDsl: self => - type Schema <: RecordSchema - val schema: Schema - type Row = schema.Values - val rows: V[schema.Values] - type Self = Relation[V] { + type Schema = S + + val svt: SchemaValueType[S, VS] = summon[SchemaValueType[S, VS]] + + type Row = VS + val rows: V[Row] + type Self = Relation[S, VS, V] { type Schema = self.Schema type Row = self.Row } def show(using Foldable[V]) = - import cats.syntax.foldable.given - schema.toString + "\n-----\n" + rows @@ -41,88 +46,113 @@ abstract class Relation[V[_]] extends ExprClassicDsl: .reverse .mkString("\n") - transparent inline def projection[S2 <: RecordSchema](s2: S2)(using Functor[V]) = - import cats.syntax.functor.given - val f = s2.projectorFrom(schema) - val vals = rows.map(f) - Relation(s2)(vals) - - transparent inline def crossProductFrom[R1 <: Relation[V]](r1: R1)(using - FlatMap[V] - ): Relation[V] = - import cats.syntax.flatMap.given - import cats.syntax.functor.given - val schema3 = r1.schema.appendOtherSchema(schema) - val concatValues: (r1.schema.Values, schema.Values) => schema3.Values = - r1.schema.appendValues(schema)(schema3) + transparent inline def projection[S2 <: RecordSchema, VS2 <: Tuple](s2: S2)(using + proj: Projector[Schema, Row, S2, VS2], + f: Functor[V] + ): Relation[S2, VS2, V] = + val rsvt: SchemaValueType[S2, VS2] = proj.to + val vals = rows.map(v => proj(v).asInstanceOf[rsvt.Value]) + Relation[S2, VS2, V](s2)(using rsvt)(vals) + + /** Строим декартово произведение отношения r1 и текущего отношения. 
+ */ + transparent inline def crossProductFrom[ + S1 <: RecordSchema, + VS1, + R1 <: Relation[S1, VS1, V], + VRes + ](r1: R1)(using + fm: FlatMap[V], + concat: Concatenator[S1, VS1, S, VS, VRes] + )(using + ev1: r1.Row =:= concat.aSvt.Value + // ev2: Row =:= concat.bSvt.Value + ): Relation[concat.Schema, concat.abSvt.Value, V] = + type S = concat.Schema + val schema3: S = concat.schemaConcat(r1.schema, schema) val vals = for row1 <- r1.rows row2 <- this.rows - yield concatValues(row1, row2) - Relation(schema3)(vals) - - transparent inline def crossProduct[R2 <: Relation[V]](r2: R2)(using FlatMap[V]) = - import cats.syntax.flatMap.given - import cats.syntax.functor.given - val schema3 = schema.appendOtherSchema(r2.schema) - val f: (schema.Values, r2.schema.Values) => schema3.Values = - schema.appendValues(r2.schema)(schema3) - val vals = - for - row1 <- this.rows - row2 <- r2.rows - yield f(row1, row2) - Relation[schema3.type, V](schema3)(vals) - - transparent inline def join[FK <: ForeignKeyId0, R2 <: Relation[V]](inline fk: FK)(r2: R2)(using - FlatMap[V] - )(using FunctorFilter[V]) = - import cats.syntax.flatMap.given - import cats.syntax.functor.given - import cats.syntax.functorFilter.given - val schema3 = schema.appendOtherSchema(r2.schema) - val concatValues: (schema.Values, r2.schema.Values) => schema3.Values = - schema.appendValues(r2.schema)(schema3) - val pred: schema3.Values => Boolean = schema3.fkPredicate(fk) + yield concat(row1, row2) + Relation[concat.Schema, concat.abSvt.Value, V](schema3: S)(using concat.abSvt)(vals) + + transparent inline def crossProduct[S2 <: RecordSchema, VS2, R2 <: Relation[S2, VS2, V], VRes]( + r2: R2 + )(using + fm: FlatMap[V], + concat: Concatenator[S, VS, S2, VS2, VRes] + ): Relation[concat.Schema, concat.abSvt.Value, V] = + type S = concat.Schema + val schema3: S = concat.schemaConcat(schema, r2.schema) val vals = for row1 <- this.rows row2 <- r2.rows - row3 = concatValues(row1, row2) - // if pred(row3) - yield row3 - val 
filtered = vals.filter(pred) - Relation[schema3.type, V](schema3)(filtered) - - transparent inline def prependCalcColumn[P <: RecordProperty0](p: P)(inline f: Row => p.P)(using - FlatMap[V] - ) = - import cats.syntax.flatMap.given - import cats.syntax.functor.given - val schema3 = p #: schema - val vals = rows.map(row => (f(row) *: row).asInstanceOf[schema3.Values]) - Relation(schema3)(vals) - transparent inline def prependCalcColumnF[P <: RecordProperty0](p: P)(inline f: RelExpr[p.P])( - using FlatMap[V] + yield concat(row1, row2) + Relation[concat.Schema, concat.abSvt.Value, V](schema3: S)(using concat.abSvt)(vals) + +// transparent inline def join[FK <: ForeignKeyId0, R2 <: Relation[V]](inline fk: FK)(r2: R2)(using +// FlatMap[V] +// )(using FunctorFilter[V]) = +// import cats.syntax.flatMap.given +// import cats.syntax.functor.given +// import cats.syntax.functorFilter.given +// val schema3 = schema.appendOtherSchema(r2.schema) +// val concatValues: (schema.Values, r2.schema.Values) => schema3.Values = +// schema.appendValues(r2.schema)(schema3) +// val pred: schema3.Values => Boolean = schema3.fkPredicate(fk) +// val vals = +// for +// row1 <- this.rows +// row2 <- r2.rows +// row3 = concatValues(row1, row2) +// // if pred(row3) +// yield row3 +// val filtered = vals.filter(pred) +// Relation[schema3.type, V](schema3)(filtered) + + transparent inline def prependCalcColumn[T, P <: SimplePropertyId[?, T], VRes](p: P)( + using + sv: SchemaValueType[p.type #: EmptySchema, Tuple1[T]], + fm: FlatMap[V], + concat: Concatenator[p.type #: EmptySchema, Tuple1[T], Schema, VS, VRes], +)(f: VS => T) = + val pSchema: p.type #: EmptySchema = p #: EmptySchema + + val schema3 = concat.schemaConcat(pSchema, schema) + val vals = rows.map{row => + val pv: sv.Value = Tuple1(f(row)) + (concat(pv, row)) + } + Relation[concat.Schema, concat.abSvt.Value, V](schema3)(using concat.abSvt)(vals) + + transparent inline def prependCalcColumnF[T, P <: SimplePropertyId[?, T], VRes](p: 
P)(inline f: RelExpr[T])( + using + sv: SchemaValueType[p.type #: EmptySchema, Tuple1[T]], + fm: FlatMap[V], + concat: Concatenator[p.type #: EmptySchema, Tuple1[T], Schema, VS, VRes] ) = - prependCalcColumn(p)(rowFun(f)) + val fun = rowFun(f) + prependCalcColumn(p)(fun) + // type Rename[T, P1 <: RecordProperty[T], P2 <: RecordProperty[T]] <: RecordSchema transparent inline def rename[T, P1 <: RecordProperty[T], P2 <: RecordProperty[T]]( inline p1: P1, - p2: P2 - )(using Functor[V]) = - val schema3 = schema.rename(p1, p2) - val vals = rows.asInstanceOf[V[schema3.Values]] // to avoid iteration and map - Relation[schema3.type, V](schema3)(vals) - - transparent inline def ++[R2 <: Relation[V]](r2: R2)(using - ev: r2.schema.Values =:= schema.Values - )(using SemigroupK[V])(using Functor[V]) = + p2: P2, + // svt3: SchemaValueType[RecordSchema.Replace[P1, P2, Schema], Row] + )(using Functor[V]) = //: Rename[T, P1, P2] = + val schema3 = schema.rename(p1, p2).asInstanceOf[RecordSchema.Replace[P1, P2, Schema]] + type Schema1 = Schema + val svt3 = new SchemaValueType[RecordSchema.Replace[P1, P2, Schema], Row]: + type Schema = RecordSchema.Replace[P1, P2, Schema1] + type Value = Row + val vals = rows//.asInstanceOf[V[schema3.Values]] // to avoid iteration and map + Relation[RecordSchema.Replace[P1, P2, Schema], Row, V](schema3)(using svt3)(vals) + + transparent inline def ++[R2 <: Relation[S, VS, V]](r2: R2)(using SemigroupK[V])(using Functor[V]) = import cats.syntax.all.toSemigroupKOps - // val vals = rows <+> r2.rows.map(ev) - // to avoid iteration and map we cast: - val vals = rows <+> r2.rows.asInstanceOf[V[schema.Values]] // to avoid iteration and map + val vals = rows <+> r2.rows Relation(schema)(vals) transparent inline def replaceRows(inline f: V[Row] => V[Row]) = @@ -140,157 +170,161 @@ abstract class Relation[V[_]] extends ExprClassicDsl: import cats.syntax.functorFilter.given replaceRows(_.filterNot(predicate)) - /** NB: O(N + M ln M), N = rows.size, M = 
R2.rows.size */ - transparent inline def --[R2 <: Relation[V]](r2: R2)(using - ev: r2.schema.Values =:= schema.Values - )(using Foldable[V])(using FunctorFilter[V]) = - import cats.syntax.all.toFoldableOps - val set2 = r2.rows.asInstanceOf[V[Row]].foldLeft(Set[Row]())(_ + _) - filterNot(set2.contains) - - final def groupBy[B](f: Row => B)(using Order[B])(using Foldable[V])(using MonoidK[V])(using - Applicative[V] - ): SortedMap[B, V[Row]] = - groupMap(key = f)(identity) - - final def groupMap[K, B](key: Row => K)(f: Row => B)(using K: Order[K])(using Foldable[V])(using - SemigroupK[V] - )(using Applicative[V]): SortedMap[K, V[B]] = - given ordering: Ordering[K] = K.toOrdering - import cats.syntax.all.toFoldableOps - import cats.syntax.all.toSemigroupKOps - val app = Applicative[V] - rows.foldLeft(SortedMap.empty[K, V[B]])((m, elem) => - val k = key(elem) - m.get(k) match { - case Some(b) => m.updated(key = k, value = b <+> app.pure(f(elem))) - case None => m + (k -> app.pure(f(elem))) - } - ) - final def groupMapReduce[K, B](key: Row => K)(f: Row => B)(using K: Order[K], S: Semigroup[B])( - using Foldable[V] - ): SortedMap[K, B] = - groupMapReduceWith(key)(f)(S.combine) - - final def groupMapReduceWith[K, B](key: Row => K)(f: Row => B)(combine: (B, B) => B)(using - K: Order[K] - )(using Foldable[V]): SortedMap[K, B] = - given ordering: Ordering[K] = K.toOrdering - import cats.syntax.all.toFoldableOps - - rows.foldLeft(SortedMap.empty[K, B])((m, elem) => - val k = key(elem) - - m.get(k) match { - case Some(b) => m.updated(key = k, value = combine(b, f(elem))) - case None => m + (k -> f(elem)) - } - ) - def toSemigroup[A](combineImpl: (A, A) => A): Semigroup[A] = - new Semigroup[A]: - def combine(a: A, b: A): A = combineImpl(a, b) - - transparent inline def groupMapReduceS[ - KeySchema <: RecordSchema, - AggregateSchema <: RecordSchema - ]( - keySchema: KeySchema, - aggregateSchema: AggregateSchema - // )( - // resultSchema: RecordSchema.Concat[keySchema.type, 
aggregateSchema.type],//inline schema3: RecordSchema.Concat[this.type, schema2.type] - )( - inline k: Row => keySchema.Values, - inline m: Row => aggregateSchema.Values - )(using Order[keySchema.Values])(using - Semigroup[aggregateSchema.Values] - ) // to aggregate values, something like `sum` - (using MonoidK[V])(using Applicative[V])(using Foldable[V]) - // : Relation[V]{ - // // type Schema = resultSchema.type - // } - = - - val resultSchema = keySchema.concat[aggregateSchema.type]( - aggregateSchema - ) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] - - val grouped = groupMapReduce[keySchema.Values, aggregateSchema.Values](k)(m) - - convertSortedMapToRelation(keySchema, aggregateSchema)(grouped) - // val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - // // concat - // val allVals: Iterable[resultSchema.Values] = grouped.toIterable.map(concat(_, _)) - // import cats.MonoidK.ops.toAllMonoidKOps - // val vals = allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => b <+> Applicative[V].pure(a)) - // Relation.apply(resultSchema)(vals) -transparent inline def convertSortedMapToRelation[ - V[_], - KeySchema <: RecordSchema, - AggregateSchema <: RecordSchema - // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] -]( - keySchema: KeySchema, - aggregateSchema: AggregateSchema - // )( - // resultSchema: RecordSchema.Concat[keySchema.type, aggregateSchema.type],//inline schema3: RecordSchema.Concat[this.type, schema2.type] -)(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using - Semigroup[aggregateSchema.Values] -) // to aggregate values, something like `sum` -(using MonoidK[V])(using Applicative[V])(using Foldable[V]): Relation[V] { - type Schema <: RecordSchema.Concat[keySchema.type, aggregateSchema.type] -} = - val resultSchema = keySchema.concat[aggregateSchema.type](aggregateSchema) - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - val 
allVals: Seq[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) - val vals: V[resultSchema.Values] = - allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => - MonoidK[V].combineK(b, Applicative[V].pure(a)) - ) - Relation.apply[resultSchema.type, V](resultSchema)(vals) - -transparent inline def convertSortedMapToV[ - V[_], - KeySchema <: RecordSchema, - AggregateSchema <: RecordSchema - // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] -]( - keySchema: KeySchema, - aggregateSchema: AggregateSchema -)(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using - Semigroup[aggregateSchema.Values] -) // to aggregate values, something like `sum` -(using MonoidK[V])(using Applicative[V])(using Foldable[V]) -// : Relation[V]{ -// // type Schema = resultSchema.type -// } -= - val resultSchema = - keySchema.concat(aggregateSchema) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - val allVals: Seq[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) - val vals = allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => - MonoidK[V].combineK(b, Applicative[V].pure(a)) - ) - vals +// /** NB: O(N + M ln M), N = rows.size, M = R2.rows.size */ +// transparent inline def --[R2 <: Relation[V]](r2: R2)(using +// ev: r2.schema.Values =:= schema.Values +// )(using Foldable[V])(using FunctorFilter[V]) = +// import cats.syntax.all.toFoldableOps +// val set2 = r2.rows.asInstanceOf[V[Row]].foldLeft(Set[Row]())(_ + _) +// filterNot(set2.contains) + +// final def groupBy[B](f: Row => B)(using Order[B])(using Foldable[V])(using MonoidK[V])(using +// Applicative[V] +// ): SortedMap[B, V[Row]] = +// groupMap(key = f)(identity) + +// final def groupMap[K, B](key: Row => K)(f: Row => B)(using K: Order[K])(using Foldable[V])(using +// SemigroupK[V] +// )(using Applicative[V]): SortedMap[K, V[B]] = +// given ordering: Ordering[K] = 
K.toOrdering +// import cats.syntax.all.toFoldableOps +// import cats.syntax.all.toSemigroupKOps +// val app = Applicative[V] +// rows.foldLeft(SortedMap.empty[K, V[B]])((m, elem) => +// val k = key(elem) +// m.get(k) match { +// case Some(b) => m.updated(key = k, value = b <+> app.pure(f(elem))) +// case None => m + (k -> app.pure(f(elem))) +// } +// ) +// final def groupMapReduce[K, B](key: Row => K)(f: Row => B)(using K: Order[K], S: Semigroup[B])( +// using Foldable[V] +// ): SortedMap[K, B] = +// groupMapReduceWith(key)(f)(S.combine) + +// final def groupMapReduceWith[K, B](key: Row => K)(f: Row => B)(combine: (B, B) => B)(using +// K: Order[K] +// )(using Foldable[V]): SortedMap[K, B] = +// given ordering: Ordering[K] = K.toOrdering +// import cats.syntax.all.toFoldableOps + +// rows.foldLeft(SortedMap.empty[K, B])((m, elem) => +// val k = key(elem) + +// m.get(k) match { +// case Some(b) => m.updated(key = k, value = combine(b, f(elem))) +// case None => m + (k -> f(elem)) +// } +// ) +// def toSemigroup[A](combineImpl: (A, A) => A): Semigroup[A] = +// new Semigroup[A]: +// def combine(a: A, b: A): A = combineImpl(a, b) + +// transparent inline def groupMapReduceS[ +// KeySchema <: RecordSchema, +// AggregateSchema <: RecordSchema +// ]( +// keySchema: KeySchema, +// aggregateSchema: AggregateSchema +// // )( +// // resultSchema: RecordSchema.Concat[keySchema.type, aggregateSchema.type],//inline schema3: RecordSchema.Concat[this.type, schema2.type] +// )( +// inline k: Row => keySchema.Values, +// inline m: Row => aggregateSchema.Values +// )(using Order[keySchema.Values])(using +// Semigroup[aggregateSchema.Values] +// ) // to aggregate values, something like `sum` +// (using MonoidK[V])(using Applicative[V])(using Foldable[V]) +// // : Relation[V]{ +// // // type Schema = resultSchema.type +// // } +// = + +// val resultSchema = keySchema.concat[aggregateSchema.type]( +// aggregateSchema +// ) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] 
+ +// val grouped = groupMapReduce[keySchema.Values, aggregateSchema.Values](k)(m) + +// convertSortedMapToRelation(keySchema, aggregateSchema)(grouped) +// // val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// // // concat +// // val allVals: Iterable[resultSchema.Values] = grouped.toIterable.map(concat(_, _)) +// // import cats.MonoidK.ops.toAllMonoidKOps +// // val vals = allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => b <+> Applicative[V].pure(a)) +// // Relation.apply(resultSchema)(vals) + +// transparent inline def convertSortedMapToRelation[ +// V[_], +// KeySchema <: RecordSchema, +// AggregateSchema <: RecordSchema +// // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] +// ]( +// keySchema: KeySchema, +// aggregateSchema: AggregateSchema +// // )( +// // resultSchema: RecordSchema.Concat[keySchema.type, aggregateSchema.type],//inline schema3: RecordSchema.Concat[this.type, schema2.type] +// )(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using +// Semigroup[aggregateSchema.Values] +// ) // to aggregate values, something like `sum` +// (using MonoidK[V])(using Applicative[V])(using Foldable[V]): Relation[V] { +// type Schema <: RecordSchema.Concat[keySchema.type, aggregateSchema.type] +// } = +// val resultSchema = keySchema.concat[aggregateSchema.type](aggregateSchema) +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// val allVals: Seq[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) +// val vals: V[resultSchema.Values] = +// allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => +// MonoidK[V].combineK(b, Applicative[V].pure(a)) +// ) +// Relation.apply[resultSchema.type, V](resultSchema)(vals) + +// transparent inline def convertSortedMapToV[ +// V[_], +// KeySchema <: RecordSchema, +// AggregateSchema <: RecordSchema +// // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] +// ]( +// keySchema: 
KeySchema, +// aggregateSchema: AggregateSchema +// )(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using +// Semigroup[aggregateSchema.Values] +// ) // to aggregate values, something like `sum` +// (using MonoidK[V])(using Applicative[V])(using Foldable[V]) +// // : Relation[V]{ +// // // type Schema = resultSchema.type +// // } +// = +// val resultSchema = +// keySchema.concat(aggregateSchema) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// val allVals: Seq[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) +// val vals = allVals.foldLeft(MonoidK[V].empty[resultSchema.Values])((b, a) => +// MonoidK[V].combineK(b, Applicative[V].pure(a)) +// ) +// vals object Relation: - transparent inline def apply[S1 <: RecordSchema, V[_]](s1: S1)(inline v: V[s1.Values]) = - new Relation[V] { - type Schema = s1.type - val schema = s1 - val rows = v + transparent inline def apply[S1 <: RecordSchema, VS1, V[_]]( + s1: S1 + )(using svt1: SchemaValueType[S1, VS1])(inline v: V[svt1.Value]) = + new Relation[S1, VS1, V](s1)(using svt1) { + type Schema = S1 + val rows = v.asInstanceOf[V[VS1]] } - transparent inline def empty[S1 <: RecordSchema, V[_]](inline s1: S1)(using MonoidK[V]) = - apply(s1)(MonoidK[V].empty) + transparent inline def empty[S1 <: RecordSchema, VS1, V[_]](s1: S1)(using svt1: SchemaValueType[S1, VS1])(using MonoidK[V]) = + apply[S1, VS1, V](s1)(using svt1)(MonoidK[V].empty) - type RelationOf[S <: RecordSchema] = [V[_]] =>> Relation[V] { - type Schema = S - } - type RelationOfV[V[_]] = [S <: RecordSchema] =>> Relation[V] { - type Schema = S - } + // type RelationOf[S <: RecordSchema] = [V[_]] =>> Relation[V] { + // type Schema = S + // } + // type RelationOfV[V[_]] = [S <: RecordSchema] =>> Relation[V] { + // type Schema = S + // } extension (tb: TableBuilder) - transparent inline def relation[V[_]](inline values1: V[tb.Row]): 
Relation[V] = + transparent inline def relation[TSV, V[_]](using + rsvt: SchemaValueType[tb.TableSchema, TSV] + )(values1: V[rsvt.Value]): Relation[tb.TableSchema, TSV, V] = Relation(tb.tableSchema)(values1) diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/RelationF.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/RelationF.scala index 6f614a1..2652c7f 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/RelationF.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/RelationF.scala @@ -16,8 +16,10 @@ import ru.primetalk.typed.ontology.simple.meta.ForeignKeyId0 import ru.primetalk.typed.ontology.simple.meta.RecordSchema import ru.primetalk.typed.ontology.simple.meta.RecordProperty import ru.primetalk.typed.ontology.simple.meta.RecordProperty0 +import ru.primetalk.typed.ontology.simple.meta.SchemaValueType import ru.primetalk.typed.ontology.simple.meta.TableBuilder import fs2._ +import ru.primetalk.typed.ontology.simple.meta.RecordSchemaValueType /** RelationF is a pair of schema and a stream of instances of that schema. F - is the effect type. 
*/ @@ -26,8 +28,12 @@ abstract class RelationF[+F[_]] extends ExprClassicDsl: type Schema <: RecordSchema val schema: Schema - type Row = schema.Values - val rows: Stream[F, schema.Values] + + val svt: SchemaValueType[Schema, Row] // = summon[SchemaValueType[schema.type]] + + type Row + + val rows: Stream[F, Row] def show[F2[x] >: F[x]](using Concurrent[F2], Functor[F2]): F2[String] = import cats.syntax.functor.given @@ -41,242 +47,242 @@ abstract class RelationF[+F[_]] extends ExprClassicDsl: .mkString("\n") } - transparent inline def projection[S2 <: RecordSchema, F2[x] >: F[x]](s2: S2)(using Functor[F2]) = - import cats.syntax.functor.given - val f = s2.projectorFrom(schema) - val vals = rows.map(f) - RelationF(s2)(vals) +// transparent inline def projection[S2 <: RecordSchema, F2[x] >: F[x]](s2: S2)(using Functor[F2]) = +// import cats.syntax.functor.given +// val f = s2.projectorFrom(schema) +// val vals = rows.map(f) +// RelationF(s2)(vals) - transparent inline def crossProductFrom[R1 <: RelationF[F2], F2[x] >: F[x]](r1: R1)(using - FlatMap[F2] - ): RelationF[F2] = - import cats.syntax.flatMap.given - val schema3 = r1.schema.appendOtherSchema(schema) - val concatValues: (r1.schema.Values, schema.Values) => schema3.Values = - r1.schema.appendValues(schema)(schema3) - val vals = - for - row1 <- r1.rows - row2 <- this.rows - yield concatValues(row1, row2) - RelationF(schema3)(vals) +// transparent inline def crossProductFrom[R1 <: RelationF[F2], F2[x] >: F[x]](r1: R1)(using +// FlatMap[F2] +// ): RelationF[F2] = +// import cats.syntax.flatMap.given +// val schema3 = r1.schema.appendOtherSchema(schema) +// val concatValues: (r1.schema.Values, schema.Values) => schema3.Values = +// r1.schema.appendValues(schema)(schema3) +// val vals = +// for +// row1 <- r1.rows +// row2 <- this.rows +// yield concatValues(row1, row2) +// RelationF(schema3)(vals) - transparent inline def crossProduct[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using - FlatMap[F2] - ) = - import 
cats.syntax.flatMap.given - val schema3 = schema.appendOtherSchema(r2.schema) - val f: (schema.Values, r2.schema.Values) => schema3.Values = - schema.appendValues(r2.schema)(schema3) - val vals = - for - row1 <- this.rows - row2 <- r2.rows - yield f(row1, row2) - RelationF[schema3.type, F2](schema3)(vals) +// transparent inline def crossProduct[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using +// FlatMap[F2] +// ) = +// import cats.syntax.flatMap.given +// val schema3 = schema.appendOtherSchema(r2.schema) +// val f: (schema.Values, r2.schema.Values) => schema3.Values = +// schema.appendValues(r2.schema)(schema3) +// val vals = +// for +// row1 <- this.rows +// row2 <- r2.rows +// yield f(row1, row2) +// RelationF[schema3.type, F2](schema3)(vals) - transparent inline def join[FK <: ForeignKeyId0, R2 <: RelationF[F2], F2[x] >: F[x]]( - inline fk: FK - )(r2: R2)(using FlatMap[F2])(using FunctorFilter[F2]) = - import cats.syntax.flatMap.given - import cats.syntax.functorFilter.given - val schema3 = schema.appendOtherSchema(r2.schema) - val concatValues: (schema.Values, r2.schema.Values) => schema3.Values = - schema.appendValues(r2.schema)(schema3) - val pred: schema3.Values => Boolean = schema3.fkPredicate(fk) - val vals = - for - row1 <- this.rows - row2 <- r2.rows - row3 = concatValues(row1, row2) - // if pred(row3) - yield row3 - val filtered = vals.filter(pred) - RelationF[schema3.type, F2](schema3)(filtered) +// transparent inline def join[FK <: ForeignKeyId0, R2 <: RelationF[F2], F2[x] >: F[x]]( +// inline fk: FK +// )(r2: R2)(using FlatMap[F2])(using FunctorFilter[F2]) = +// import cats.syntax.flatMap.given +// import cats.syntax.functorFilter.given +// val schema3 = schema.appendOtherSchema(r2.schema) +// val concatValues: (schema.Values, r2.schema.Values) => schema3.Values = +// schema.appendValues(r2.schema)(schema3) +// val pred: schema3.Values => Boolean = schema3.fkPredicate(fk) +// val vals = +// for +// row1 <- this.rows +// row2 <- r2.rows +// row3 = 
concatValues(row1, row2) +// // if pred(row3) +// yield row3 +// val filtered = vals.filter(pred) +// RelationF[schema3.type, F2](schema3)(filtered) - transparent inline def prependCalcColumn[P <: RecordProperty0, F2[x] >: F[x]](p: P)( - inline f: Row => p.P - )(using FlatMap[F2]) = - import cats.syntax.flatMap.given - val schema3 = p #: schema - val vals = rows.map(row => (f(row) *: row).asInstanceOf[schema3.Values]) - RelationF(schema3)(vals) +// transparent inline def prependCalcColumn[P <: RecordProperty0, F2[x] >: F[x]](p: P)( +// inline f: Row => p.P +// )(using FlatMap[F2]) = +// import cats.syntax.flatMap.given +// val schema3 = p #: schema +// val vals = rows.map(row => (f(row) *: row).asInstanceOf[schema3.Values]) +// RelationF(schema3)(vals) - transparent inline def rename[T, P1 <: RecordProperty[T], P2 <: RecordProperty[T], F2[x] >: F[x]]( - inline p1: P1, - p2: P2 - )(using Functor[F2]) = - val schema3 = schema.rename(p1, p2) - val vals = rows.map(_.asInstanceOf[schema3.Values]) - RelationF[schema3.type, F2](schema3)(vals) +// transparent inline def rename[T, P1 <: RecordProperty[T], P2 <: RecordProperty[T], F2[x] >: F[x]]( +// inline p1: P1, +// p2: P2 +// )(using Functor[F2]) = +// val schema3 = schema.rename(p1, p2) +// val vals = rows.map(_.asInstanceOf[schema3.Values]) +// RelationF[schema3.type, F2](schema3)(vals) - transparent inline def ++[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using - ev: r2.schema.Values =:= schema.Values - )(using SemigroupK[F2])(using Functor[F2]) = - import cats.syntax.all.toSemigroupKOps - val rows1: Stream[F2, schema.Values] = rows // .map(identity) - val vals: Stream[F2, schema.Values] = rows1 <+> r2.rows.map(ev) - RelationF[schema.type, F2](schema)(vals) +// transparent inline def ++[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using +// ev: r2.schema.Values =:= schema.Values +// )(using SemigroupK[F2])(using Functor[F2]) = +// import cats.syntax.all.toSemigroupKOps +// val rows1: Stream[F2, schema.Values] = rows 
// .map(identity) +// val vals: Stream[F2, schema.Values] = rows1 <+> r2.rows.map(ev) +// RelationF[schema.type, F2](schema)(vals) - transparent inline def replaceRows[F2[x] >: F[x]](inline f: Stream[F, Row] => Stream[F2, Row]) = - RelationF(schema)(f(rows)) +// transparent inline def replaceRows[F2[x] >: F[x]](inline f: Stream[F, Row] => Stream[F2, Row]) = +// RelationF(schema)(f(rows)) - transparent inline def filter[F2[x] >: F[x]](inline predicate: Row => Boolean)(using - FunctorFilter[F2] - ) = - import cats.syntax.functorFilter.given - replaceRows(_.filter(predicate)) - // TODO: try optimizing withFilter - transparent inline def withFilter[F2[x] >: F[x]](inline predicate: Row => Boolean)(using - FunctorFilter[F2] - ) = - import cats.syntax.functorFilter.given - replaceRows(_.filter(predicate)) +// transparent inline def filter[F2[x] >: F[x]](inline predicate: Row => Boolean)(using +// FunctorFilter[F2] +// ) = +// import cats.syntax.functorFilter.given +// replaceRows(_.filter(predicate)) +// // TODO: try optimizing withFilter +// transparent inline def withFilter[F2[x] >: F[x]](inline predicate: Row => Boolean)(using +// FunctorFilter[F2] +// ) = +// import cats.syntax.functorFilter.given +// replaceRows(_.filter(predicate)) - transparent inline def filterNot[F2[x] >: F[x]](inline predicate: Row => Boolean)(using - FunctorFilter[F2] - ) = - import cats.syntax.functorFilter.given - replaceRows(_.filterNot(predicate)) +// transparent inline def filterNot[F2[x] >: F[x]](inline predicate: Row => Boolean)(using +// FunctorFilter[F2] +// ) = +// import cats.syntax.functorFilter.given +// replaceRows(_.filterNot(predicate)) - /** NB: O(N + M ln M), N = rows.size, M = R2.rows.size */ - transparent inline def --[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using - ev: r2.schema.Values =:= schema.Values - )(using Functor[F2])(using FunctorFilter[F2])(using Concurrent[F2]): F2[Any] = - Functor[F2].map(r2.rows.compile.toVector) { vector => - val set2 = 
vector.toSet.map(ev) - filterNot[F2](set2.contains) - } +// /** NB: O(N + M ln M), N = rows.size, M = R2.rows.size */ +// transparent inline def --[R2 <: RelationF[F2], F2[x] >: F[x]](r2: R2)(using +// ev: r2.schema.Values =:= schema.Values +// )(using Functor[F2])(using FunctorFilter[F2])(using Concurrent[F2]): F2[Any] = +// Functor[F2].map(r2.rows.compile.toVector) { vector => +// val set2 = vector.toSet.map(ev) +// filterNot[F2](set2.contains) +// } - // final def groupAdjacentBy[B, F2[x] >: F[x]](f: Row => B)(using Order[B])(using Foldable[F2])(using MonoidK[F2])(using Applicative[F2]): SortedMap[B, V[Row]] = - // groupMap(key = f)(identity) +// // final def groupAdjacentBy[B, F2[x] >: F[x]](f: Row => B)(using Order[B])(using Foldable[F2])(using MonoidK[F2])(using Applicative[F2]): SortedMap[B, V[Row]] = +// // groupMap(key = f)(identity) - // final def groupAdjacentMap[K, B, F2[x] >: F[x]](key: Row => K)(f: Row => B)(using K: Order[K])(using Foldable[F2])(using SemigroupK[F2])(using Applicative[F2]): SortedMap[K, V[B]] = - // given ordering: Ordering[K] = K.toOrdering - // import cats.syntax.all.toFoldableOps - // import cats.syntax.all.toSemigroupKOps - // val app = Applicative[F2] - // rows.foldLeft(SortedMap.empty[K, V[B]])( (m, elem) => - // val k = key(elem) - // m.get(k) match { - // case Some(b) => m.updated(key = k, value = b <+> app.pure(f(elem))) - // case None => m + (k -> app.pure(f(elem))) - // } - // ) -// final def groupMapReduce[K, B](key: Row => K)(f: Row => B)(using K: Order[K], S: Semigroup[B])(using Foldable[F2]): SortedMap[K, B] = -// groupMapReduceWith(key)(f)(S.combine) +// // final def groupAdjacentMap[K, B, F2[x] >: F[x]](key: Row => K)(f: Row => B)(using K: Order[K])(using Foldable[F2])(using SemigroupK[F2])(using Applicative[F2]): SortedMap[K, V[B]] = +// // given ordering: Ordering[K] = K.toOrdering +// // import cats.syntax.all.toFoldableOps +// // import cats.syntax.all.toSemigroupKOps +// // val app = Applicative[F2] +// // 
rows.foldLeft(SortedMap.empty[K, V[B]])( (m, elem) => +// // val k = key(elem) +// // m.get(k) match { +// // case Some(b) => m.updated(key = k, value = b <+> app.pure(f(elem))) +// // case None => m + (k -> app.pure(f(elem))) +// // } +// // ) +// // final def groupMapReduce[K, B](key: Row => K)(f: Row => B)(using K: Order[K], S: Semigroup[B])(using Foldable[F2]): SortedMap[K, B] = +// // groupMapReduceWith(key)(f)(S.combine) -// final def groupMapReduceWith[K, B](key: Row => K)(f: Row => B)(combine: (B, B) => B)(using K: Order[K])(using Foldable[F2]): SortedMap[K, B] = -// given ordering: Ordering[K] = K.toOrdering -// import cats.syntax.all.toFoldableOps +// // final def groupMapReduceWith[K, B](key: Row => K)(f: Row => B)(combine: (B, B) => B)(using K: Order[K])(using Foldable[F2]): SortedMap[K, B] = +// // given ordering: Ordering[K] = K.toOrdering +// // import cats.syntax.all.toFoldableOps -// rows.foldLeft(SortedMap.empty[K, B])( (m, elem) => -// val k = key(elem) +// // rows.foldLeft(SortedMap.empty[K, B])( (m, elem) => +// // val k = key(elem) -// m.get(k) match { -// case Some(b) => m.updated(key = k, value = combine(b, f(elem))) -// case None => m + (k -> f(elem)) -// } +// // m.get(k) match { +// // case Some(b) => m.updated(key = k, value = combine(b, f(elem))) +// // case None => m + (k -> f(elem)) +// // } -// ) - def toSemigroup[A](combineImpl: (A, A) => A): Semigroup[A] = - new Semigroup[A]: - def combine(a: A, b: A): A = combineImpl(a, b) +// // ) +// def toSemigroup[A](combineImpl: (A, A) => A): Semigroup[A] = +// new Semigroup[A]: +// def combine(a: A, b: A): A = combineImpl(a, b) -// transparent inline def groupMapReduceS[ +// // transparent inline def groupMapReduceS[ +// // KeySchema <: RecordSchema, +// // AggregateSchema <: RecordSchema +// // ]( +// // inline keySchema: KeySchema, +// // inline aggregateSchema: AggregateSchema +// // )( +// // inline resultSchema: RecordSchema.Concat[keySchema.type, aggregateSchema.type],//inline schema3: 
RecordSchema.Concat[this.type, schema2.type] +// // )( +// // inline k: Row => keySchema.Values, +// // inline m: Row => aggregateSchema.Values +// // )(using Order[keySchema.Values]) +// // (using Semigroup[aggregateSchema.Values])// to aggregate values, something like `sum` +// // (using MonoidK[F2]) +// // (using Applicative[F2]) +// // (using Foldable[F2]) +// // // : RelationF[F2], F2[x] >: F[x]{ +// // // // type Schema = resultSchema.type +// // // } +// // = +// // // ( +// // // val resultSchema = keySchema.concat(aggregateSchema)// : RecordSchema.Concat[keySchema.type, aggregateSchema.type] +// // // ) +// // val grouped = groupMapReduce[keySchema.Values, aggregateSchema.Values](k)(m) + +// // convertSortedMapToRelation(keySchema, aggregateSchema)(resultSchema)(grouped) +// // // val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// // // // concat +// // // val allVals: Iterable[resultSchema.Values] = grouped.toIterable.map(concat(_, _)) +// // // import cats.MonoidK.ops.toAllMonoidKOps +// // // val vals = allVals.foldLeft(MonoidK[F2].empty[resultSchema.Values])((b, a) => b <+> Applicative[F2].pure(a)) +// // // Relation.apply(resultSchema)(vals) +// transparent inline def convertSortedMapToRelationF[ +// F2[_], // KeySchema <: RecordSchema, // AggregateSchema <: RecordSchema -// ]( -// inline keySchema: KeySchema, -// inline aggregateSchema: AggregateSchema -// )( -// inline resultSchema: RecordSchema.Concat[keySchema.type, aggregateSchema.type],//inline schema3: RecordSchema.Concat[this.type, schema2.type] -// )( -// inline k: Row => keySchema.Values, -// inline m: Row => aggregateSchema.Values -// )(using Order[keySchema.Values]) -// (using Semigroup[aggregateSchema.Values])// to aggregate values, something like `sum` -// (using MonoidK[F2]) -// (using Applicative[F2]) -// (using Foldable[F2]) -// // : RelationF[F2], F2[x] >: F[x]{ -// // // type Schema = resultSchema.type -// // } -// = -// // ( -// // val resultSchema = 
keySchema.concat(aggregateSchema)// : RecordSchema.Concat[keySchema.type, aggregateSchema.type] -// // ) -// val grouped = groupMapReduce[keySchema.Values, aggregateSchema.Values](k)(m) - -// convertSortedMapToRelation(keySchema, aggregateSchema)(resultSchema)(grouped) -// // val concat = keySchema.concatValues(aggregateSchema)(resultSchema) -// // // concat -// // val allVals: Iterable[resultSchema.Values] = grouped.toIterable.map(concat(_, _)) -// // import cats.MonoidK.ops.toAllMonoidKOps -// // val vals = allVals.foldLeft(MonoidK[F2].empty[resultSchema.Values])((b, a) => b <+> Applicative[F2].pure(a)) -// // Relation.apply(resultSchema)(vals) -transparent inline def convertSortedMapToRelationF[ - F2[_], - KeySchema <: RecordSchema, - AggregateSchema <: RecordSchema - // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] -]( - keySchema: KeySchema, - aggregateSchema: AggregateSchema -)( - resultSchema: RecordSchema.Concat[ - keySchema.type, - aggregateSchema.type - ] // inline schema3: RecordSchema.Concat[this.type, schema2.type] -)(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using - Semigroup[aggregateSchema.Values] -) // to aggregate values, something like `sum` -(using MonoidK[F2])(using Applicative[F2])(using Foldable[F2]): RelationF[F2] { - type Schema = resultSchema.type -} = - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - val vals: Stream[F2, resultSchema.Values] = Stream.emits(grouped.toSeq.map(concat(_, _))) - RelationF.apply[RecordSchema.Concat[keySchema.type, aggregateSchema.type], F2](resultSchema)(vals) +// // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] +// ]( +// keySchema: KeySchema, +// aggregateSchema: AggregateSchema +// )( +// resultSchema: RecordSchema.Concat[ +// keySchema.type, +// aggregateSchema.type +// ] // inline schema3: RecordSchema.Concat[this.type, schema2.type] +// )(grouped: SortedMap[keySchema.Values, 
aggregateSchema.Values])(using Order[keySchema.Values])(using +// Semigroup[aggregateSchema.Values] +// ) // to aggregate values, something like `sum` +// (using MonoidK[F2])(using Applicative[F2])(using Foldable[F2]): RelationF[F2] { +// type Schema = resultSchema.type +// } = +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// val vals: Stream[F2, resultSchema.Values] = Stream.emits(grouped.toSeq.map(concat(_, _))) +// RelationF.apply[RecordSchema.Concat[keySchema.type, aggregateSchema.type], F2](resultSchema)(vals) -transparent inline def convertSortedMapToVF[ - F2[_], - KeySchema <: RecordSchema, - AggregateSchema <: RecordSchema - // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] -]( - keySchema: KeySchema, - aggregateSchema: AggregateSchema -)(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using - Semigroup[aggregateSchema.Values] -) // to aggregate values, something like `sum` -(using MonoidK[F2])(using Applicative[F2])(using Foldable[F2]) = - val resultSchema = - keySchema.concat(aggregateSchema) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] - val concat = keySchema.concatValues(aggregateSchema)(resultSchema) - val allVals: Iterable[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) - val vals = allVals.foldLeft(MonoidK[F2].empty[resultSchema.Values])((b, a) => - MonoidK[F2].combineK(b, Applicative[F2].pure(a)) - ) - vals +// transparent inline def convertSortedMapToVF[ +// F2[_], +// KeySchema <: RecordSchema, +// AggregateSchema <: RecordSchema +// // ResultSchema <: RecordSchema.Concat[KeySchema, AggregateSchema] +// ]( +// keySchema: KeySchema, +// aggregateSchema: AggregateSchema +// )(grouped: SortedMap[keySchema.Values, aggregateSchema.Values])(using Order[keySchema.Values])(using +// Semigroup[aggregateSchema.Values] +// ) // to aggregate values, something like `sum` +// (using MonoidK[F2])(using Applicative[F2])(using Foldable[F2]) = +// val 
resultSchema = +// keySchema.concat(aggregateSchema) // : RecordSchema.Concat[keySchema.type, aggregateSchema.type] +// val concat = keySchema.concatValues(aggregateSchema)(resultSchema) +// val allVals: Iterable[resultSchema.Values] = grouped.toSeq.map(concat(_, _)) +// val vals = allVals.foldLeft(MonoidK[F2].empty[resultSchema.Values])((b, a) => +// MonoidK[F2].combineK(b, Applicative[F2].pure(a)) +// ) +// vals -object RelationF: - transparent inline def apply[S1 <: RecordSchema, F[_]](s1: S1)(inline v: Stream[F, s1.Values]) = - new RelationF[F] { - type Schema = s1.type - val schema = s1 - val rows = v - } +// object RelationF: +// transparent inline def apply[S1 <: RecordSchema, F[_]](s1: S1)(inline v: Stream[F, s1.Values]) = +// new RelationF[F] { +// type Schema = s1.type +// val schema = s1 +// val rows = v +// } - transparent inline def empty[S1 <: RecordSchema, F2[x]](s1: S1)(using MonoidK[F2]) = - apply[s1.type, F2](s1)(Stream.empty) +// transparent inline def empty[S1 <: RecordSchema, F2[x]](s1: S1)(using MonoidK[F2]) = +// apply[s1.type, F2](s1)(Stream.empty) - type RelationOf[S <: RecordSchema] = [F[_]] =>> RelationF[F] { - type Schema = S - } - type RelationOfV[F2[_]] = [S <: RecordSchema] =>> RelationF[F2] { - type Schema = S - } +// type RelationOf[S <: RecordSchema] = [F[_]] =>> RelationF[F] { +// type Schema = S +// } +// type RelationOfV[F2[_]] = [S <: RecordSchema] =>> RelationF[F2] { +// type Schema = S +// } -extension (tb: TableBuilder) - transparent inline def relationF[F[_]](inline values1: Stream[F, tb.Row]): RelationF[F] = - RelationF(tb.tableSchema)(values1) +// extension (tb: TableBuilder) +// transparent inline def relationF[F[_]](inline values1: Stream[F, tb.Row]): RelationF[F] = +// RelationF(tb.tableSchema)(values1) diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/TaglessPredicateDsl.scala 
b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/TaglessPredicateDsl.scala index f6fb93e..917a0de 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/TaglessPredicateDsl.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/relalg/TaglessPredicateDsl.scala @@ -3,37 +3,37 @@ package ru.primetalk.typed.ontology.simple.relalg import ru.primetalk.typed.ontology.simple.meta.RecordSchema import ru.primetalk.typed.ontology.simple.meta.RecordProperty0 -// doesn't work at the moment -trait TaglessPredicateDsl: - type Schema <: RecordSchema - val schema: Schema - type Row = schema.Values +// // doesn't work at the moment +// trait TaglessPredicateDsl: +// type Schema <: RecordSchema +// val schema: Schema +// type Row = schema.Values - trait TaglessDsl[E[_]]: - inline def getter[T](inline name: String, inline f: Row => T): E[T] - inline def property[P <: RecordProperty0](p: P): E[RecordProperty0.PropertyValueType[p.type]] - inline def value[T](inline v: T): E[T] - inline def equals[T](inline e1: E[T], inline e2: E[T]): E[Boolean] +// trait TaglessDsl[E[_]]: +// inline def getter[T](inline name: String, inline f: Row => T): E[T] +// inline def property[P <: RecordProperty0](p: P): E[RecordProperty0.PropertyValueType[p.type]] +// inline def value[T](inline v: T): E[T] +// inline def equals[T](inline e1: E[T], inline e2: E[T]): E[Boolean] - type RowFun = [T] =>> Row => T +// type RowFun = [T] =>> Row => T - object taglessEval extends TaglessDsl[RowFun]: // [T] =>> Row => T]: - inline def getter[T](inline name: String, inline f: Row => T): Row => T = f - inline def property[P <: RecordProperty0]( - p: P - ): Row => RecordProperty0.PropertyValueType[p.type] = - schema.propertyGetter(p) - inline def value[T](inline v: T): Row => T = _ => v - inline def equals[T](inline e1: Row => T, inline e2: Row => T): Row => Boolean = - r => e1(r) == e2(r) +// object taglessEval 
extends TaglessDsl[RowFun]: // [T] =>> Row => T]: +// inline def getter[T](inline name: String, inline f: Row => T): Row => T = f +// inline def property[P <: RecordProperty0]( +// p: P +// ): Row => RecordProperty0.PropertyValueType[p.type] = +// schema.propertyGetter(p) +// inline def value[T](inline v: T): Row => T = _ => v +// inline def equals[T](inline e1: Row => T, inline e2: Row => T): Row => Boolean = +// r => e1(r) == e2(r) - object taglessShow extends TaglessDsl[[T] =>> String]: - inline def getter[T](inline name: String, inline f: Row => T): String = name - inline def property[P <: RecordProperty0](p: P): String = - p.name - inline def value[T](inline v: T): String = s"$v" - inline def equals[T](inline e1: String, inline e2: String): String = - s"($e1 == $e2)" +// object taglessShow extends TaglessDsl[[T] =>> String]: +// inline def getter[T](inline name: String, inline f: Row => T): String = name +// inline def property[P <: RecordProperty0](p: P): String = +// p.name +// inline def value[T](inline v: T): String = s"$v" +// inline def equals[T](inline e1: String, inline e2: String): String = +// s"($e1 == $e2)" - inline def expr[T](inline e: [E[_]] => TaglessDsl[E] => E[T]): Row => T = - e[RowFun](taglessEval) +// inline def expr[T](inline e: [E[_]] => TaglessDsl[E] => E[T]): Row => T = +// e[RowFun](taglessEval) diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/sqlrelation/SqlRelation.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/sqlrelation/SqlRelation.scala new file mode 100644 index 0000000..c62142b --- /dev/null +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/sqlrelation/SqlRelation.scala @@ -0,0 +1,23 @@ +package ru.primetalk.typed.ontology.simple.sqlrelation + +import ru.primetalk.typed.ontology.simple.meta.RecordSchema +import ru.primetalk.typed.ontology.simple.relalg.ExprClassicDsl +import 
ru.primetalk.typed.ontology.simple.meta.SchemaValueType + +sealed trait SqlRelation[S <: RecordSchema, VS] extends ExprClassicDsl: + self => + + val schema: S + type Schema = S + + val svt: SchemaValueType[S, VS] + + type Row = VS + +final case class EntityRelation[S <: RecordSchema, VS](name: String, schema: S, svt: SchemaValueType[S, VS]) extends SqlRelation[S, VS] + +def sql[S<:RecordSchema, VS](r: SqlRelation[S, VS]): String = + r match + case EntityRelation(name, schema, svt) => + s"SELECT * FROM $name" // TODO: render explicit column list from schema instead of * + \ No newline at end of file diff --git a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/tmap/TypedMap.scala b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/tmap/TypedMap.scala index afa49bd..660d6a6 100644 --- a/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/tmap/TypedMap.scala +++ b/typed-ontology-simple-meta/src/main/scala/ru/primetalk/typed/ontology/simple/tmap/TypedMap.scala @@ -4,40 +4,40 @@ import scala.collection.SortedMap import ru.primetalk.typed.ontology.simple.meta.RecordProperty import ru.primetalk.typed.ontology.simple.meta.RecordProperty0 -/** Typed map allows to obtain the value of property and update it when needed. This is a simple - * one-level implementation of typed maps. Does not naturally support nested objects. 
+// */ +// trait TypedMap[M[_]] { +// def apply[R, P <: RecordProperty[R]](m: M[R])( +// p: P +// ): Option[RecordProperty0.PropertyValueType[p.type]] +// def updated[R, P <: RecordProperty[R]]( +// m: M[R] +// )(p: P, v: Option[RecordProperty0.PropertyValueType[p.type]]): M[R] +// } -extension [M[_], R](tm: M[R])(using TypedMap[M]) - def apply[P <: RecordProperty[R]](p: P): Option[RecordProperty0.PropertyValueType[p.type]] = - summon[TypedMap[M]].apply(tm)(p) - def updated[P <: RecordProperty[R]]( - p: P, - v: Option[RecordProperty0.PropertyValueType[p.type]] - ): M[R] = - summon[TypedMap[M]].updated(tm)(p, v) +// extension [M[_], R](tm: M[R])(using TypedMap[M]) +// def apply[P <: RecordProperty[R]](p: P): Option[RecordProperty0.PropertyValueType[p.type]] = +// summon[TypedMap[M]].apply(tm)(p) +// def updated[P <: RecordProperty[R]]( +// p: P, +// v: Option[RecordProperty0.PropertyValueType[p.type]] +// ): M[R] = +// summon[TypedMap[M]].updated(tm)(p, v) -opaque type SimpleTypedMap[R] = Map[String, Any] +// opaque type SimpleTypedMap[R] = Map[String, Any] -object SimpleTypedMap: - given TypedMap[SimpleTypedMap] with - def apply[R, P <: RecordProperty[R]](m: SimpleTypedMap[R])( - p: P - ): Option[RecordProperty0.PropertyValueType[p.type]] = - m.get(p.name).asInstanceOf[Option[RecordProperty0.PropertyValueType[p.type]]] - def updated[R, P <: RecordProperty[R]]( - m: SimpleTypedMap[R] - )(p: P, v: Option[RecordProperty0.PropertyValueType[p.type]]): SimpleTypedMap[R] = - v match - case Some(v) => - m + (p.name -> v) - case None => - m -- Seq(p.name) +// object SimpleTypedMap: +// given TypedMap[SimpleTypedMap] with +// def apply[R, P <: RecordProperty[R]](m: SimpleTypedMap[R])( +// p: P +// ): Option[RecordProperty0.PropertyValueType[p.type]] = +// m.get(p.name).asInstanceOf[Option[RecordProperty0.PropertyValueType[p.type]]] +// def updated[R, P <: RecordProperty[R]]( +// m: SimpleTypedMap[R] +// )(p: P, v: Option[RecordProperty0.PropertyValueType[p.type]]): 
SimpleTypedMap[R] = +// v match +// case Some(v) => +// m + (p.name -> v) +// case None => +// m -- Seq(p.name) diff --git a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/BaseSpec.scala b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/BaseSpec.scala new file mode 100644 index 0000000..07487f2 --- /dev/null +++ b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/BaseSpec.scala @@ -0,0 +1,11 @@ +package ru.primetalk.typed.ontology.simple.meta + +import org.scalatest.{Args, Status, Suite, TestSuite} +import org.scalatest.concurrent.ScalaFutures +import org.scalatest.funspec.{AnyFunSpec, AnyFunSpecLike} +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.should.Matchers +import org.scalatest.matchers.should.Matchers.AnyShouldWrapper +import org.scalatest.wordspec.AnyWordSpec + +abstract class BaseSpec extends AnyFunSuite with Matchers diff --git a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/PersonSpec.scala b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/PersonSpec.scala index 311dd22..f94055c 100644 --- a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/PersonSpec.scala +++ b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/PersonSpec.scala @@ -1,10 +1,9 @@ package ru.primetalk.typed.ontology.simple.meta -import org.junit.Test - -class PersonSpec: +class PersonSpec extends BaseSpec: final case class Person(name: String, age: Int) derives SchemaProvider - @Test def schemaTest = + test("schema derivation test") { println(SchemaProvider[Person].schema) + } diff --git a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/Product.scala b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/Product.scala new file mode 100644 index 0000000..1fbd4b6 --- 
/dev/null +++ b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/Product.scala @@ -0,0 +1,25 @@ +package ru.primetalk.typed.ontology.simple.meta + +import SimpleTypes.{given, *} + +object Product extends TableBuilder: + object id extends column[Int] + val id1 = id + type Id = id1.type + object name extends column[String] + type Name = name.type + object price extends column[BigInt] + type Price = price.type + + type PriceSchema = Price #: EmptySchema + type TableSchema = Id #: Name #: Price #: EmptySchema + + implicit val tableSchema: TableSchema = fields(id, name, price) + val idNameSchema = fields(id, name) + val primaryKeySchema = fields(id) + + val fullSchema = infer[TableSchema] + val priceP = summon[RecordPropertyValueType.Aux1[Price]] + val svt = summon[RecordSchemaValueType[TableSchema]] + type Row = svt.Value +end Product diff --git a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/ProductSpec.scala b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/ProductSpec.scala new file mode 100644 index 0000000..6d272a2 --- /dev/null +++ b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/ProductSpec.scala @@ -0,0 +1,43 @@ +package ru.primetalk.typed.ontology.simple.meta + +class ProductSpec extends BaseSpec: + test("all") { + import Product.{given, *} + import SimpleTypes.{given, *} + val id1 = valueOf[Id] + val s1 = summon[SchemaValueType.Aux1[ScalarSchema1[Int]]] + val evS1 = summon[s1.Value =:= Int] + + val empty = emptySchemaSVT // summon[SchemaTupleValueType[EmptySchema]](using emptySchemaSVT) + val ev1 = summon[empty.Value =:= EmptyTuple] + val i1: empty.Value = EmptyTuple + + type IdRecord = Id #: EmptySchema + val idRecord = id #: EmptySchema + val idValue: ValueOf[id.type] = summon[ValueOf[id.type]] + // val p = propertyValueType[id.type] + + val idSchemaSvt = + summon[SchemaValueType.Aux1[id.Schema]] // (using 
scalarSchema1svt[Int, id.Schema]) + + val idRecordSvt1 = tuple1Schema[Int, id.type] + val idRecordSvt = summon[RecordSchemaValueType[IdRecord]] // (using tuple1Schema) + val idRecordSvt0: RecordSchemaValueType[IdRecord] = idRecordSvt + val i: idRecordSvt.Value = Tuple1(10) + + val ev2 = summon[idRecordSvt.Value =:= Tuple1[Int]] + + val nameSvt = summon[RecordSchemaValueType[name.type #: EmptySchema]] // (using tuple1Schema) + val svt2 = summon[RecordSchemaValueType[id.type #: name.type #: EmptySchema]] + + val svt = summon[RecordSchemaValueType[id.type #: name.type #: price.type #: EmptySchema]] + val product1: svt.Value = (1, "product1", BigInt(10)) + + val svt1 = Product.svt + val product2: svt1.Value = (2, "product1", BigInt(10)) + + val svt3 = summon[RecordSchemaValueType[Product.TableSchema]] + val product3: svt3.Value = (1, "product1", BigInt(10)) + + val product4: Product.Row = (3, "product1", BigInt(10)) + } diff --git a/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/TupleSimpleTypesSpec.scala b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/TupleSimpleTypesSpec.scala new file mode 100644 index 0000000..e332061 --- /dev/null +++ b/typed-ontology-simple-meta/src/test/scala/ru/primetalk/typed/ontology/simple/meta/TupleSimpleTypesSpec.scala @@ -0,0 +1,60 @@ +package ru.primetalk.typed.ontology.simple.meta + +class TupleSimpleTypesSpec extends BaseSpec: + + object SimpleTypes1 extends TupleSimpleTypes + with ScalarSimpleTypes + with ConversionSimpleTypes + with PropertySimpleTypes + + test("all") { + import Product.{given, *} + import SimpleTypes1.{given, *} + + import ScalarSchema1.given + + val emptyTupleSchema = EmptyTupleSchema + val emptyTupleSchemaSvt = summon[SchemaValueType.Aux1[emptyTupleSchema.type]] + val evid1 = summon[emptyTupleSchemaSvt.Value =:= EmptyTuple] + val intSchema = summon[ScalarSchema1[Int]] + val intSchemaSvt = + summon[SchemaValueType.Aux1[intSchema.type]] // (using 
scalarSchema1svt[Int, intSchema.type]) + val evScalarInt = summon[intSchemaSvt.Value =:= (Int)] + + val stringSchema = summon[ScalarSchema1[String]] + val stringTuple = + NonEmptyTupleSchema[stringSchema.type, emptyTupleSchema.type](stringSchema, emptyTupleSchema) + val stringTupleSvt = summon[SchemaValueType.Aux1[stringTuple.type]] // (using nonEmptyTupleSchema) + val evScalar1 = summon[stringTupleSvt.Value =:= Tuple1[String]] + + val scalar2 = NonEmptyTupleSchema(intSchema, NonEmptyTupleSchema(stringSchema, EmptyTupleSchema)) + val scalar2Svt = summon[SchemaValueType.Aux1[scalar2.type]] // (using nonEmptyTupleSchema) + val evScalar2 = summon[scalar2Svt.Value =:= (Int, String)] + + println(Product.id1) + implicit val name1: Product.name.type = Product.name + + val rpvt1 = propertyValueType[String, Product.Name] + // assert(rpvt1.property == Product.name) + + val rpvt = summon[RecordPropertyValueType[Product.Name, String]] + summon[rpvt.Value =:= String] + } + + test("property projectors"){ + import SimpleTypes.{*, given} + val rpvt = summon[RecordPropertyValueType[Product.Price, BigInt]] + // assert(rpvt.property == Product.price) + val svtp = summon[SchemaValueType[rpvt.Schema, BigInt]] + summon[svtp.Value =:= BigInt] + val svtps = summon[SchemaValueType[Product.PriceSchema, Tuple1[BigInt]]] + summon[svtps.Value =:= Tuple1[BigInt]] + + require(Product.price != null) + val prj = propertyProjectorHead(using rpvt, svtp, svtps) + val priceProjector = summon[PropertyProjector[Product.PriceSchema, Tuple1[BigInt], Product.Price, BigInt]]( + using prj + ) + assert(priceProjector(Tuple1(BigInt(1))) == BigInt(1)) + + }