diff --git a/DEVELOPER.md b/DEVELOPER.md index 4e3ce7f2e..11bb7fbe1 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -43,7 +43,8 @@ In development: Updating apibuilder.me ================== - script/upload + script/update + script/update_daos SBT ========== diff --git a/api/app/db/InternalTasksDao.scala b/api/app/db/InternalTasksDao.scala index 6f87a0b1a..43789bb8e 100644 --- a/api/app/db/InternalTasksDao.scala +++ b/api/app/db/InternalTasksDao.scala @@ -37,7 +37,7 @@ class InternalTasksDao @Inject() ( organizationGuid: Option[UUID] = None, data: JsValue = Json.obj() ): Unit = { - if (dao.findByTypeIdAndTypeWithConnection(c, id, typ.toString).isEmpty) { + if (dao.findByTypeIdAndTypeWithConnection(c, (id, typ.toString)).isEmpty) { dao.upsertByTypeIdAndType( c, Constants.DefaultUserGuid, diff --git a/api/app/db/generated/GeneratorApibuilderSessionsDao.scala b/api/app/db/generated/GeneratorApibuilderSessionsDao.scala deleted file mode 100644 index f6b90ff46..000000000 --- a/api/app/db/generated/GeneratorApibuilderSessionsDao.scala +++ /dev/null @@ -1,238 +0,0 @@ -package db.generated - -import anorm._ -import db.DbHelpers -import io.flow.postgresql.{OrderBy, Query} -import java.sql.Connection -import java.util.UUID -import javax.inject.{Inject, Singleton} -import org.joda.time.DateTime -import play.api.db.{Database, NamedDatabase} - -case class Session( - id: String, - userGuid: UUID, - expiresAt: DateTime, - createdAt: DateTime, - createdByGuid: UUID, - updatedAt: DateTime, - updatedByGuid: UUID, - deletedAt: Option[DateTime], - deletedByGuid: Option[UUID] -) { - - lazy val form: SessionForm = SessionForm( - id = id, - userGuid = userGuid, - expiresAt = expiresAt, - createdAt = createdAt, - createdByGuid = createdByGuid, - updatedAt = updatedAt, - updatedByGuid = updatedByGuid, - deletedAt = deletedAt, - deletedByGuid = deletedByGuid - ) - -} - -case class SessionForm( - id: String, - userGuid: UUID, - expiresAt: DateTime, - createdAt: DateTime, - createdByGuid: UUID, - updatedAt: DateTime, - updatedByGuid: UUID, - deletedAt: Option[DateTime], - deletedByGuid: Option[UUID] -) - -@Singleton -class SessionsDao @Inject() ( - @NamedDatabase("default") db: Database -) { - - private val dbHelpers = DbHelpers(db, "sessions") - - private val BaseQuery = Query(""" - | select sessions.id, - | sessions.user_guid, - | sessions.expires_at, - | sessions.created_at, - | sessions.created_by_guid, - | sessions.updated_at, - | sessions.updated_by_guid, - | sessions.deleted_at, - | sessions.deleted_by_guid, - | sessions.hash_code - | from sessions - """.stripMargin) - - private val InsertQuery = Query(""" - | insert into sessions - | (id, user_guid, expires_at, created_at, created_by_guid, updated_at, updated_by_guid, deleted_at, deleted_by_guid, hash_code) - | values - | ({id}, {user_guid}::uuid, {expires_at}::timestamptz, {created_at}::timestamptz, {created_by_guid}::uuid, {updated_at}::timestamptz, {updated_by_guid}::uuid, {deleted_at}::timestamptz, {deleted_by_guid}::uuid, {hash_code}::bigint) - """.stripMargin) - - private val UpdateQuery = Query(""" - | update sessions - | set user_guid = {user_guid}::uuid, - | expires_at = {expires_at}::timestamptz, - | created_at = {created_at}::timestamptz, - | created_by_guid = {created_by_guid}::uuid, - | updated_at = {updated_at}::timestamptz, - | updated_by_guid = {updated_by_guid}::uuid, - | deleted_at = {deleted_at}::timestamptz, - | deleted_by_guid = {deleted_by_guid}::uuid, - | hash_code = {hash_code}::bigint - | where id = {id} - | and (sessions.hash_code is 
null or sessions.hash_code != {hash_code}::bigint) - """.stripMargin) - - private def bindQuery(query: Query, form: SessionForm): Query = { - query. - bind("user_guid", form.userGuid). - bind("expires_at", form.expiresAt). - bind("created_at", form.createdAt). - bind("created_by_guid", form.createdByGuid). - bind("updated_at", form.updatedAt). - bind("updated_by_guid", form.updatedByGuid). - bind("deleted_at", form.deletedAt). - bind("deleted_by_guid", form.deletedByGuid). - bind("hash_code", form.hashCode()) - } - - def insert(updatedBy: UUID, form: SessionForm): Unit = { - db.withConnection { implicit c => - insert(c, updatedBy, form) - } - } - - def insert(implicit c: Connection, updatedBy: UUID, form: SessionForm): Unit = { - bindQuery(InsertQuery, form). - bind("id", form.id). - anormSql().execute() - } - - def updateIfChangedById(updatedBy: UUID, id: String, form: SessionForm): Unit ={ - if (!findById(id).map(_.form).contains(form)) { - updateById(updatedBy, id, form) - } - } - - def updateById(updatedBy: UUID, id: String, form: SessionForm): Unit = { - db.withConnection { implicit c => - updateById(c, updatedBy, id, form) - } - } - - def updateById(implicit c: Connection, updatedBy: UUID, id: String, form: SessionForm): Unit = { - bindQuery(UpdateQuery, form). - bind("id", id). - anormSql().execute() - } - - def update(updatedBy: UUID, existing: Session, form: SessionForm): Unit = { - db.withConnection { implicit c => - update(c, updatedBy, existing, form) - } - } - - def update(implicit c: Connection, updatedBy: UUID, existing: Session, form: SessionForm): Unit = { - updateById(c, updatedBy, existing.id, form) - } - - def delete(deletedBy: UUID, session: Session): Unit = { - dbHelpers.delete(deletedBy, session.id) - } - - def deleteById(deletedBy: UUID, id: String): Unit = { - db.withConnection { implicit c => - deleteById(c, deletedBy, id) - } - } - - def deleteById(c: java.sql.Connection, deletedBy: UUID, id: String): Unit = { - dbHelpers.delete(c, deletedBy, id) - } - - def findById(id: String): Option[Session] = { - db.withConnection { implicit c => - findByIdWithConnection(c, id) - } - } - - def findByIdWithConnection(c: java.sql.Connection, id: String): Option[Session] = { - findAllWithConnection(c, ids = Some(Seq(id)), limit = 1).headOption - } - - def findAll( - ids: Option[Seq[String]] = None, - userGuid: Option[UUID] = None, - limit: Long, - offset: Long = 0, - orderBy: OrderBy = OrderBy("sessions.id") - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[Session] = { - db.withConnection { implicit c => - findAllWithConnection( - c, - ids = ids, - userGuid = userGuid, - limit = limit, - offset = offset, - orderBy = orderBy - )(customQueryModifier) - } - } - - def findAllWithConnection( - c: java.sql.Connection, - ids: Option[Seq[String]] = None, - userGuid: Option[UUID] = None, - limit: Long, - offset: Long = 0, - orderBy: OrderBy = OrderBy("sessions.id") - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[Session] = { - customQueryModifier(BaseQuery). - optionalIn("sessions.id", ids). - equals("sessions.user_guid", userGuid). - limit(limit). - offset(offset). - orderBy(orderBy.sql). 
- as(SessionsDao.parser().*)(c) - } - -} - -object SessionsDao { - - def parser(): RowParser[Session] = { - SqlParser.str("id") ~ - SqlParser.get[UUID]("user_guid") ~ - SqlParser.get[DateTime]("expires_at") ~ - SqlParser.get[DateTime]("created_at") ~ - SqlParser.get[UUID]("created_by_guid") ~ - SqlParser.get[DateTime]("updated_at") ~ - SqlParser.get[UUID]("updated_by_guid") ~ - SqlParser.get[DateTime]("deleted_at").? ~ - SqlParser.get[UUID]("deleted_by_guid").? map { - case id ~ userGuid ~ expiresAt ~ createdAt ~ createdByGuid ~ updatedAt ~ updatedByGuid ~ deletedAt ~ deletedByGuid => Session( - id = id, - userGuid = userGuid, - expiresAt = expiresAt, - createdAt = createdAt, - createdByGuid = createdByGuid, - updatedAt = updatedAt, - updatedByGuid = updatedByGuid, - deletedAt = deletedAt, - deletedByGuid = deletedByGuid - ) - } - } - -} \ No newline at end of file diff --git a/api/app/db/generated/PsqlApibuilderGeneratorInvocationsDao.scala b/api/app/db/generated/PsqlApibuilderGeneratorInvocationsDao.scala deleted file mode 100644 index 4d813e176..000000000 --- a/api/app/db/generated/PsqlApibuilderGeneratorInvocationsDao.scala +++ /dev/null @@ -1,329 +0,0 @@ -package db.generated - -import anorm._ -import com.mbryzek.util.IdGenerator -import io.flow.postgresql.{OrderBy, Query} -import java.sql.Connection -import java.util.UUID -import javax.inject.{Inject, Singleton} -import org.joda.time.DateTime -import play.api.db.Database - -case class GeneratorInvocation( - id: String, - key: String, - organizationKey: Option[String], - applicationKey: Option[String], - updatedByGuid: String, - createdAt: DateTime, - updatedAt: DateTime -) { - - lazy val form: GeneratorInvocationForm = GeneratorInvocationForm( - key = key, - organizationKey = organizationKey, - applicationKey = applicationKey - ) - -} - -case class GeneratorInvocationForm( - key: String, - organizationKey: Option[String], - applicationKey: Option[String] -) - -object GeneratorInvocationsTable { - val Schema: String = "public" - val Name: String = "generator_invocations" - val QualifiedName: String = s"$Schema.$Name" - - object Columns { - val Id: String = "id" - val Key: String = "key" - val OrganizationKey: String = "organization_key" - val ApplicationKey: String = "application_key" - val UpdatedByGuid: String = "updated_by_guid" - val CreatedAt: String = "created_at" - val UpdatedAt: String = "updated_at" - val HashCode: String = "hash_code" - val all: List[String] = List(Id, Key, OrganizationKey, ApplicationKey, UpdatedByGuid, CreatedAt, UpdatedAt, HashCode) - } -} - -trait BaseGeneratorInvocationsDao { - - def db: Database - - private val BaseQuery = Query(""" - | select generator_invocations.id, - | generator_invocations.key, - | generator_invocations.organization_key, - | generator_invocations.application_key, - | generator_invocations.updated_by_guid, - | generator_invocations.created_at, - | generator_invocations.updated_at, - | generator_invocations.hash_code - | from generator_invocations - """.stripMargin) - - def findById(id: String): Option[GeneratorInvocation] = { - db.withConnection { c => - findByIdWithConnection(c, id) - } - } - - def findByIdWithConnection(c: java.sql.Connection, id: String): Option[GeneratorInvocation] = { - findAllWithConnection(c, ids = Some(Seq(id)), limit = Some(1L)).headOption - } - - def iterateAll( - ids: Option[Seq[String]] = None, - pageSize: Long = 2000L, - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Iterator[GeneratorInvocation] = { - def iterate(lastValue: 
Option[GeneratorInvocation]): Iterator[GeneratorInvocation] = { - val page = findAll( - ids = ids, - limit = Some(pageSize), - orderBy = OrderBy("generator_invocations.id"), - ) { q => customQueryModifier(q).greaterThan("generator_invocations.id", lastValue.map(_.id)) } - - page.lastOption match { - case None => Iterator.empty - case lastValue => page.iterator ++ iterate(lastValue) - } - } - - iterate(None) - } - - def findAll( - ids: Option[Seq[String]] = None, - limit: Option[Long], - offset: Long = 0, - orderBy: OrderBy = OrderBy("generator_invocations.id") - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[GeneratorInvocation] = { - db.withConnection { c => - findAllWithConnection( - c, - ids = ids, - limit = limit, - offset = offset, - orderBy = orderBy - )(customQueryModifier) - } - } - - def findAllWithConnection( - c: java.sql.Connection, - ids: Option[Seq[String]] = None, - limit: Option[Long], - offset: Long = 0, - orderBy: OrderBy = OrderBy("generator_invocations.id") - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[GeneratorInvocation] = { - customQueryModifier(BaseQuery). - optionalIn("generator_invocations.id", ids). - optionalLimit(limit). - offset(offset). - orderBy(orderBy.sql). - as(GeneratorInvocationsDao.parser.*)(c) - } - -} - -object GeneratorInvocationsDao { - - val parser: RowParser[GeneratorInvocation] = { - SqlParser.str("id") ~ - SqlParser.str("key") ~ - SqlParser.str("organization_key").? ~ - SqlParser.str("application_key").? ~ - SqlParser.str("updated_by_guid") ~ - SqlParser.get[DateTime]("created_at") ~ - SqlParser.get[DateTime]("updated_at") map { - case id ~ key ~ organizationKey ~ applicationKey ~ updatedByGuid ~ createdAt ~ updatedAt => GeneratorInvocation( - id = id, - key = key, - organizationKey = organizationKey, - applicationKey = applicationKey, - updatedByGuid = updatedByGuid, - createdAt = createdAt, - updatedAt = updatedAt - ) - } - } - -} - -@Singleton -class GeneratorInvocationsDao @Inject() ( - override val db: Database -) extends BaseGeneratorInvocationsDao { - - private val idGenerator = com.mbryzek.util.IdGenerator("gni") - - def randomId(): String = idGenerator.randomId() - - private val InsertQuery = Query(""" - | insert into generator_invocations - | (id, key, organization_key, application_key, updated_by_guid, hash_code) - | values - | ({id}, {key}, {organization_key}, {application_key}, {updated_by_guid}, {hash_code}::bigint) - """.stripMargin) - - private val UpdateQuery = Query(""" - | update generator_invocations - | set key = {key}, - | organization_key = {organization_key}, - | application_key = {application_key}, - | updated_by_guid = {updated_by_guid}, - | hash_code = {hash_code}::bigint - | where id = {id} - | and generator_invocations.hash_code != {hash_code}::bigint - """.stripMargin) - - private def bindQuery(query: Query, form: GeneratorInvocationForm): Query = { - query. - bind("key", form.key). - bind("organization_key", form.organizationKey). - bind("application_key", form.applicationKey). 
- bind("hash_code", form.hashCode()) - } - - private def toNamedParameter(updatedBy: UUID, id: String, form: GeneratorInvocationForm): Seq[NamedParameter] = { - Seq( - "id" -> id, - "key" -> form.key, - "organization_key" -> form.organizationKey, - "application_key" -> form.applicationKey, - "updated_by_guid" -> updatedBy, - "hash_code" -> form.hashCode() - ) - } - - def insert(updatedBy: UUID, form: GeneratorInvocationForm): String = { - db.withConnection { c => - insert(c, updatedBy, form) - } - } - - def insert(c: Connection, updatedBy: UUID, form: GeneratorInvocationForm): String = { - val id = randomId() - bindQuery(InsertQuery, form). - bind("id", id). - bind("updated_by_guid", updatedBy). - anormSql().execute()(c) - id - } - - def insertBatch(updatedBy: UUID, forms: Seq[GeneratorInvocationForm]): Seq[String] = { - db.withConnection { c => - insertBatchWithConnection(c, updatedBy, forms) - } - } - - def insertBatchWithConnection(c: Connection, updatedBy: UUID, forms: Seq[GeneratorInvocationForm]): Seq[String] = { - if (forms.nonEmpty) { - val ids = forms.map(_ => randomId()) - val params = ids.zip(forms).map { case (id, form) => toNamedParameter(updatedBy, id, form) } - BatchSql(InsertQuery.sql(), params.head, params.tail*).execute()(c) - ids - } else { - Nil - } - } - - def updateIfChangedById(updatedBy: UUID, id: String, form: GeneratorInvocationForm): Unit = { - if (!findById(id).map(_.form).contains(form)) { - updateById(updatedBy, id, form) - } - } - - def updateById(updatedBy: UUID, id: String, form: GeneratorInvocationForm): Unit = { - db.withConnection { c => - updateById(c, updatedBy, id, form) - } - } - - def updateById(c: Connection, updatedBy: UUID, id: String, form: GeneratorInvocationForm): Unit = { - bindQuery(UpdateQuery, form). - bind("id", id). - bind("updated_by_guid", updatedBy). 
- anormSql().execute()(c) - () - } - - def update(updatedBy: UUID, existing: GeneratorInvocation, form: GeneratorInvocationForm): Unit = { - db.withConnection { c => - update(c, updatedBy, existing, form) - } - } - - def update(c: Connection, updatedBy: UUID, existing: GeneratorInvocation, form: GeneratorInvocationForm): Unit = { - updateById(c, updatedBy, existing.id, form) - } - - def updateBatch(updatedBy: UUID, idsAndForms: Seq[(String, GeneratorInvocationForm)]): Unit = { - db.withConnection { c => - updateBatchWithConnection(c, updatedBy, idsAndForms) - } - } - - def updateBatchWithConnection(c: Connection, updatedBy: UUID, idsAndForms: Seq[(String, GeneratorInvocationForm)]): Unit = { - if (idsAndForms.nonEmpty) { - val params = idsAndForms.map { case (id, form) => toNamedParameter(updatedBy, id, form) } - BatchSql(UpdateQuery.sql(), params.head, params.tail*).execute()(c) - () - } - } - - def delete(deletedBy: UUID, generatorInvocation: GeneratorInvocation): Unit = { - db.withConnection { c => - delete(c, deletedBy, generatorInvocation) - } - } - - def delete(c: Connection, deletedBy: UUID, generatorInvocation: GeneratorInvocation): Unit = { - deleteById(c, deletedBy, generatorInvocation.id) - } - - def deleteById(deletedBy: UUID, id: String): Unit = { - db.withConnection { c => - deleteById(c, deletedBy, id) - } - } - - def deleteById(c: Connection, deletedBy: UUID, id: String): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from generator_invocations") - .equals("id", id) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByIds(deletedBy: UUID, ids: Seq[String]): Unit = { - db.withConnection { c => - deleteAllByIds(c, deletedBy, ids) - } - } - - def deleteAllByIds(c: Connection, deletedBy: UUID, ids: Seq[String]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from generator_invocations") - .in("id", ids) - .anormSql().executeUpdate()(c) - () - } - - def setJournalDeletedByUserId(c: Connection, deletedBy: UUID): Unit = { - Query(s"SET journal.deleted_by_user_id = '${deletedBy}'").anormSql().executeUpdate()(c) - () - } - -} \ No newline at end of file diff --git a/api/app/db/generated/PsqlApibuilderTasksDao.scala b/api/app/db/generated/PsqlApibuilderTasksDao.scala deleted file mode 100644 index e3f31c7c2..000000000 --- a/api/app/db/generated/PsqlApibuilderTasksDao.scala +++ /dev/null @@ -1,614 +0,0 @@ -package db.generated - -import anorm.JodaParameterMetaData._ -import anorm._ -import io.flow.postgresql.{OrderBy, Query} -import java.sql.Connection -import java.util.UUID -import javax.inject.{Inject, Singleton} -import org.joda.time.DateTime -import play.api.db.Database -import play.api.libs.json.{JsValue, Json} - -case class Task( - id: String, - `type`: String, - typeId: String, - organizationGuid: Option[UUID], - numAttempts: Int, - nextAttemptAt: DateTime, - errors: Option[Seq[String]], - stacktrace: Option[String], - data: JsValue, - updatedByGuid: String, - createdAt: DateTime, - updatedAt: DateTime -) { - - lazy val form: TaskForm = TaskForm( - id = id, - `type` = `type`, - typeId = typeId, - organizationGuid = organizationGuid, - numAttempts = numAttempts, - nextAttemptAt = nextAttemptAt, - errors = errors, - stacktrace = stacktrace, - data = data - ) - -} - -case class TaskForm( - id: String, - `type`: String, - typeId: String, - organizationGuid: Option[UUID], - numAttempts: Int, - nextAttemptAt: DateTime, - errors: Option[Seq[String]], - stacktrace: Option[String], - data: JsValue -) - -object TasksTable { - val Schema: 
String = "public" - val Name: String = "tasks" - val QualifiedName: String = s"$Schema.$Name" - - object Columns { - val Id: String = "id" - val Type: String = "type" - val TypeId: String = "type_id" - val OrganizationGuid: String = "organization_guid" - val NumAttempts: String = "num_attempts" - val NextAttemptAt: String = "next_attempt_at" - val Errors: String = "errors" - val Stacktrace: String = "stacktrace" - val Data: String = "data" - val UpdatedByGuid: String = "updated_by_guid" - val CreatedAt: String = "created_at" - val UpdatedAt: String = "updated_at" - val HashCode: String = "hash_code" - val all: List[String] = List(Id, Type, TypeId, OrganizationGuid, NumAttempts, NextAttemptAt, Errors, Stacktrace, Data, UpdatedByGuid, CreatedAt, UpdatedAt, HashCode) - } -} - -trait BaseTasksDao { - - def db: Database - - private val BaseQuery = Query(""" - | select tasks.id, - | tasks.type, - | tasks.type_id, - | tasks.organization_guid, - | tasks.num_attempts, - | tasks.next_attempt_at, - | tasks.errors::text as errors_text, - | tasks.stacktrace, - | tasks.data::text as data_text, - | tasks.updated_by_guid, - | tasks.created_at, - | tasks.updated_at, - | tasks.hash_code - | from tasks - """.stripMargin) - - def findById(id: String): Option[Task] = { - db.withConnection { c => - findByIdWithConnection(c, id) - } - } - - def findByIdWithConnection(c: java.sql.Connection, id: String): Option[Task] = { - findAllWithConnection(c, ids = Some(Seq(id)), limit = Some(1L), orderBy = None).headOption - } - - def findByTypeIdAndType(typeId: String, `type`: String): Option[Task] = { - db.withConnection { c => - findByTypeIdAndTypeWithConnection(c, typeId, `type`) - } - } - - def findByTypeIdAndTypeWithConnection(c: java.sql.Connection, typeId: String, `type`: String): Option[Task] = { - findAllWithConnection(c, typeId = Some(typeId), `type` = Some(`type`), limit = Some(1L), orderBy = None).headOption - } - - def iterateAll( - ids: Option[Seq[String]] = None, - typeId: Option[String] = None, - typeIds: Option[Seq[String]] = None, - `type`: Option[String] = None, - types: Option[Seq[String]] = None, - numAttempts: Option[Int] = None, - numAttemptsGreaterThanOrEquals: Option[Int] = None, - numAttemptsGreaterThan: Option[Int] = None, - numAttemptsLessThanOrEquals: Option[Int] = None, - numAttemptsLessThan: Option[Int] = None, - nextAttemptAt: Option[DateTime] = None, - nextAttemptAtGreaterThanOrEquals: Option[DateTime] = None, - nextAttemptAtGreaterThan: Option[DateTime] = None, - nextAttemptAtLessThanOrEquals: Option[DateTime] = None, - nextAttemptAtLessThan: Option[DateTime] = None, - numAttemptsNextAttemptAts: Option[Seq[(Int, DateTime)]] = None, - typeIdTypes: Option[Seq[(String, String)]] = None, - pageSize: Long = 2000L, - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Iterator[Task] = { - def iterate(lastValue: Option[Task]): Iterator[Task] = { - val page = findAll( - ids = ids, - typeId = typeId, - typeIds = typeIds, - `type` = `type`, - types = types, - numAttempts = numAttempts, - numAttemptsGreaterThanOrEquals = numAttemptsGreaterThanOrEquals, - numAttemptsGreaterThan = numAttemptsGreaterThan, - numAttemptsLessThanOrEquals = numAttemptsLessThanOrEquals, - numAttemptsLessThan = numAttemptsLessThan, - nextAttemptAt = nextAttemptAt, - nextAttemptAtGreaterThanOrEquals = nextAttemptAtGreaterThanOrEquals, - nextAttemptAtGreaterThan = nextAttemptAtGreaterThan, - nextAttemptAtLessThanOrEquals = nextAttemptAtLessThanOrEquals, - nextAttemptAtLessThan = nextAttemptAtLessThan, - 
numAttemptsNextAttemptAts = numAttemptsNextAttemptAts, - typeIdTypes = typeIdTypes, - limit = Some(pageSize), - orderBy = Some(OrderBy("tasks.id")), - ) { q => customQueryModifier(q).greaterThan("tasks.id", lastValue.map(_.id)) } - - page.lastOption match { - case None => Iterator.empty - case lastValue => page.iterator ++ iterate(lastValue) - } - } - - iterate(None) - } - - def findAll( - ids: Option[Seq[String]] = None, - typeId: Option[String] = None, - typeIds: Option[Seq[String]] = None, - `type`: Option[String] = None, - types: Option[Seq[String]] = None, - numAttempts: Option[Int] = None, - numAttemptsGreaterThanOrEquals: Option[Int] = None, - numAttemptsGreaterThan: Option[Int] = None, - numAttemptsLessThanOrEquals: Option[Int] = None, - numAttemptsLessThan: Option[Int] = None, - nextAttemptAt: Option[DateTime] = None, - nextAttemptAtGreaterThanOrEquals: Option[DateTime] = None, - nextAttemptAtGreaterThan: Option[DateTime] = None, - nextAttemptAtLessThanOrEquals: Option[DateTime] = None, - nextAttemptAtLessThan: Option[DateTime] = None, - numAttemptsNextAttemptAts: Option[Seq[(Int, DateTime)]] = None, - typeIdTypes: Option[Seq[(String, String)]] = None, - limit: Option[Long], - offset: Long = 0, - orderBy: Option[OrderBy] = Some(OrderBy("tasks.id")) - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[Task] = { - db.withConnection { c => - findAllWithConnection( - c, - ids = ids, - typeId = typeId, - typeIds = typeIds, - `type` = `type`, - types = types, - numAttempts = numAttempts, - numAttemptsGreaterThanOrEquals = numAttemptsGreaterThanOrEquals, - numAttemptsGreaterThan = numAttemptsGreaterThan, - numAttemptsLessThanOrEquals = numAttemptsLessThanOrEquals, - numAttemptsLessThan = numAttemptsLessThan, - nextAttemptAt = nextAttemptAt, - nextAttemptAtGreaterThanOrEquals = nextAttemptAtGreaterThanOrEquals, - nextAttemptAtGreaterThan = nextAttemptAtGreaterThan, - nextAttemptAtLessThanOrEquals = nextAttemptAtLessThanOrEquals, - nextAttemptAtLessThan = nextAttemptAtLessThan, - numAttemptsNextAttemptAts = numAttemptsNextAttemptAts, - typeIdTypes = typeIdTypes, - limit = limit, - offset = offset, - orderBy = orderBy - )(customQueryModifier) - } - } - - def findAllWithConnection( - c: java.sql.Connection, - ids: Option[Seq[String]] = None, - typeId: Option[String] = None, - typeIds: Option[Seq[String]] = None, - `type`: Option[String] = None, - types: Option[Seq[String]] = None, - numAttempts: Option[Int] = None, - numAttemptsGreaterThanOrEquals: Option[Int] = None, - numAttemptsGreaterThan: Option[Int] = None, - numAttemptsLessThanOrEquals: Option[Int] = None, - numAttemptsLessThan: Option[Int] = None, - nextAttemptAt: Option[DateTime] = None, - nextAttemptAtGreaterThanOrEquals: Option[DateTime] = None, - nextAttemptAtGreaterThan: Option[DateTime] = None, - nextAttemptAtLessThanOrEquals: Option[DateTime] = None, - nextAttemptAtLessThan: Option[DateTime] = None, - numAttemptsNextAttemptAts: Option[Seq[(Int, DateTime)]] = None, - typeIdTypes: Option[Seq[(String, String)]] = None, - limit: Option[Long], - offset: Long = 0, - orderBy: Option[OrderBy] = Some(OrderBy("tasks.id")) - ) ( - implicit customQueryModifier: Query => Query = { q => q } - ): Seq[Task] = { - customQueryModifier(BaseQuery). - optionalIn("tasks.id", ids). - equals("tasks.type_id", typeId). - optionalIn("tasks.type_id", typeIds). - equals("tasks.type", `type`). - optionalIn("tasks.type", types). - equals("tasks.num_attempts", numAttempts). 
- greaterThanOrEquals("tasks.num_attempts", numAttemptsGreaterThanOrEquals). - greaterThan("tasks.num_attempts", numAttemptsGreaterThan). - lessThanOrEquals("tasks.num_attempts", numAttemptsLessThanOrEquals). - lessThan("tasks.num_attempts", numAttemptsLessThan). - equals("tasks.next_attempt_at", nextAttemptAt). - greaterThanOrEquals("tasks.next_attempt_at", nextAttemptAtGreaterThanOrEquals). - greaterThan("tasks.next_attempt_at", nextAttemptAtGreaterThan). - lessThanOrEquals("tasks.next_attempt_at", nextAttemptAtLessThanOrEquals). - lessThan("tasks.next_attempt_at", nextAttemptAtLessThan). - optionalIn2(("tasks.num_attempts", "tasks.next_attempt_at"), numAttemptsNextAttemptAts). - optionalIn2(("tasks.type_id", "tasks.type"), typeIdTypes). - optionalLimit(limit). - offset(offset). - orderBy(orderBy.flatMap(_.sql)). - as(TasksDao.parser.*)(c) - } - -} - -object TasksDao { - - val parser: RowParser[Task] = { - SqlParser.str("id") ~ - SqlParser.str("type") ~ - SqlParser.str("type_id") ~ - SqlParser.get[UUID]("organization_guid").? ~ - SqlParser.int("num_attempts") ~ - SqlParser.get[DateTime]("next_attempt_at") ~ - SqlParser.str("errors_text").? ~ - SqlParser.str("stacktrace").? ~ - SqlParser.str("data_text") ~ - SqlParser.str("updated_by_guid") ~ - SqlParser.get[DateTime]("created_at") ~ - SqlParser.get[DateTime]("updated_at") map { - case id ~ type_ ~ typeId ~ organizationGuid ~ numAttempts ~ nextAttemptAt ~ errors ~ stacktrace ~ data ~ updatedByGuid ~ createdAt ~ updatedAt => Task( - id = id, - `type` = type_, - typeId = typeId, - organizationGuid = organizationGuid, - numAttempts = numAttempts, - nextAttemptAt = nextAttemptAt, - errors = errors.map { text => Json.parse(text).as[Seq[String]] }, - stacktrace = stacktrace, - data = Json.parse(data), - updatedByGuid = updatedByGuid, - createdAt = createdAt, - updatedAt = updatedAt - ) - } - } - -} - -@Singleton -class TasksDao @Inject() ( - override val db: Database -) extends BaseTasksDao { - - private val UpsertQuery = Query(""" - | insert into tasks - | (id, type, type_id, organization_guid, num_attempts, next_attempt_at, errors, stacktrace, data, updated_by_guid, hash_code) - | values - | ({id}, {type}, {type_id}, {organization_guid}::uuid, {num_attempts}::int, {next_attempt_at}::timestamptz, {errors}::json, {stacktrace}, {data}::json, {updated_by_guid}, {hash_code}::bigint) - | on conflict (type_id, type) - | do update - | set organization_guid = {organization_guid}::uuid, - | num_attempts = {num_attempts}::int, - | next_attempt_at = {next_attempt_at}::timestamptz, - | errors = {errors}::json, - | stacktrace = {stacktrace}, - | data = {data}::json, - | updated_by_guid = {updated_by_guid}, - | hash_code = {hash_code}::bigint - | where tasks.hash_code != {hash_code}::bigint - | returning id - """.stripMargin) - - private val UpdateQuery = Query(""" - | update tasks - | set type = {type}, - | type_id = {type_id}, - | organization_guid = {organization_guid}::uuid, - | num_attempts = {num_attempts}::int, - | next_attempt_at = {next_attempt_at}::timestamptz, - | errors = {errors}::json, - | stacktrace = {stacktrace}, - | data = {data}::json, - | updated_by_guid = {updated_by_guid}, - | hash_code = {hash_code}::bigint - | where id = {id} - | and tasks.hash_code != {hash_code}::bigint - """.stripMargin) - - private def bindQuery(query: Query, form: TaskForm): Query = { - query. - bind("type", form.`type`). - bind("type_id", form.typeId). - bind("organization_guid", form.organizationGuid). - bind("num_attempts", form.numAttempts). 
- bind("next_attempt_at", form.nextAttemptAt). - bind("errors", form.errors.map { v => Json.toJson(v) }). - bind("stacktrace", form.stacktrace). - bind("data", form.data). - bind("hash_code", form.hashCode()) - } - - private def toNamedParameter(updatedBy: UUID, form: TaskForm): Seq[NamedParameter] = { - Seq( - "id" -> form.id, - "type" -> form.`type`, - "type_id" -> form.typeId, - "organization_guid" -> form.organizationGuid, - "num_attempts" -> form.numAttempts, - "next_attempt_at" -> form.nextAttemptAt, - "errors" -> form.errors.map { v => Json.toJson(v).toString }, - "stacktrace" -> form.stacktrace, - "data" -> form.data.toString, - "updated_by_guid" -> updatedBy, - "hash_code" -> form.hashCode() - ) - } - - def upsertIfChangedByTypeIdAndType(updatedBy: UUID, form: TaskForm): Unit = { - if (!findByTypeIdAndType(form.typeId, form.`type`).map(_.form).contains(form)) { - upsertByTypeIdAndType(updatedBy, form) - } - } - - def upsertByTypeIdAndType(updatedBy: UUID, form: TaskForm): Unit = { - db.withConnection { c => - upsertByTypeIdAndType(c, updatedBy, form) - } - } - - def upsertByTypeIdAndType(c: Connection, updatedBy: UUID, form: TaskForm): Unit = { - bindQuery(UpsertQuery, form). - bind("id", form.id). - bind("updated_by_guid", updatedBy). - anormSql().execute()(c) - () - } - - def upsertBatchByTypeIdAndType(updatedBy: UUID, forms: Seq[TaskForm]): Unit = { - db.withConnection { c => - upsertBatchByTypeIdAndType(c, updatedBy, forms) - } - } - - def upsertBatchByTypeIdAndType(c: Connection, updatedBy: UUID, forms: Seq[TaskForm]): Unit = { - if (forms.nonEmpty) { - val params = forms.map(toNamedParameter(updatedBy, _)) - BatchSql(UpsertQuery.sql(), params.head, params.tail*).execute()(c) - () - } - } - - def updateIfChangedById(updatedBy: UUID, id: String, form: TaskForm): Unit = { - if (!findById(id).map(_.form).contains(form)) { - updateById(updatedBy, id, form) - } - } - - def updateById(updatedBy: UUID, id: String, form: TaskForm): Unit = { - db.withConnection { c => - updateById(c, updatedBy, id, form) - } - } - - def updateById(c: Connection, updatedBy: UUID, id: String, form: TaskForm): Unit = { - bindQuery(UpdateQuery, form). - bind("id", id). - bind("updated_by_guid", updatedBy). 
- anormSql().execute()(c) - () - } - - def update(updatedBy: UUID, existing: Task, form: TaskForm): Unit = { - db.withConnection { c => - update(c, updatedBy, existing, form) - } - } - - def update(c: Connection, updatedBy: UUID, existing: Task, form: TaskForm): Unit = { - updateById(c, updatedBy, existing.id, form) - } - - def updateBatch(updatedBy: UUID, forms: Seq[TaskForm]): Unit = { - db.withConnection { c => - updateBatchWithConnection(c, updatedBy, forms) - } - } - - def updateBatchWithConnection(c: Connection, updatedBy: UUID, forms: Seq[TaskForm]): Unit = { - if (forms.nonEmpty) { - val params = forms.map(toNamedParameter(updatedBy, _)) - BatchSql(UpdateQuery.sql(), params.head, params.tail*).execute()(c) - () - } - } - - def delete(deletedBy: UUID, task: Task): Unit = { - db.withConnection { c => - delete(c, deletedBy, task) - } - } - - def delete(c: Connection, deletedBy: UUID, task: Task): Unit = { - deleteById(c, deletedBy, task.id) - } - - def deleteById(deletedBy: UUID, id: String): Unit = { - db.withConnection { c => - deleteById(c, deletedBy, id) - } - } - - def deleteById(c: Connection, deletedBy: UUID, id: String): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("id", id) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByIds(deletedBy: UUID, ids: Seq[String]): Unit = { - db.withConnection { c => - deleteAllByIds(c, deletedBy, ids) - } - } - - def deleteAllByIds(c: Connection, deletedBy: UUID, ids: Seq[String]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .in("id", ids) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByNumAttempts(deletedBy: UUID, numAttempts: Int): Unit = { - db.withConnection { c => - deleteAllByNumAttempts(c, deletedBy, numAttempts) - } - } - - def deleteAllByNumAttempts(c: Connection, deletedBy: UUID, numAttempts: Int): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("num_attempts", numAttempts) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByNumAttemptses(deletedBy: UUID, numAttemptses: Seq[Int]): Unit = { - db.withConnection { c => - deleteAllByNumAttemptses(c, deletedBy, numAttemptses) - } - } - - def deleteAllByNumAttemptses(c: Connection, deletedBy: UUID, numAttemptses: Seq[Int]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .in("num_attempts", numAttemptses) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByNumAttemptsAndNextAttemptAt(deletedBy: UUID, numAttempts: Int, nextAttemptAt: DateTime): Unit = { - db.withConnection { c => - deleteAllByNumAttemptsAndNextAttemptAt(c, deletedBy, numAttempts, nextAttemptAt) - } - } - - def deleteAllByNumAttemptsAndNextAttemptAt(c: Connection, deletedBy: UUID, numAttempts: Int, nextAttemptAt: DateTime): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("num_attempts", numAttempts) - .equals("next_attempt_at", nextAttemptAt) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByNumAttemptsAndNextAttemptAts(deletedBy: UUID, numAttempts: Int, nextAttemptAts: Seq[DateTime]): Unit = { - db.withConnection { c => - deleteAllByNumAttemptsAndNextAttemptAts(c, deletedBy, numAttempts, nextAttemptAts) - } - } - - def deleteAllByNumAttemptsAndNextAttemptAts(c: Connection, deletedBy: UUID, numAttempts: Int, nextAttemptAts: Seq[DateTime]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("num_attempts", numAttempts) - 
.in("next_attempt_at", nextAttemptAts) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByTypeId(deletedBy: UUID, typeId: String): Unit = { - db.withConnection { c => - deleteAllByTypeId(c, deletedBy, typeId) - } - } - - def deleteAllByTypeId(c: Connection, deletedBy: UUID, typeId: String): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("type_id", typeId) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByTypeIds(deletedBy: UUID, typeIds: Seq[String]): Unit = { - db.withConnection { c => - deleteAllByTypeIds(c, deletedBy, typeIds) - } - } - - def deleteAllByTypeIds(c: Connection, deletedBy: UUID, typeIds: Seq[String]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .in("type_id", typeIds) - .anormSql().executeUpdate()(c) - () - } - - def deleteByTypeIdAndType(deletedBy: UUID, typeId: String, `type`: String): Unit = { - db.withConnection { c => - deleteByTypeIdAndType(c, deletedBy, typeId, `type`) - } - } - - def deleteByTypeIdAndType(c: Connection, deletedBy: UUID, typeId: String, `type`: String): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("type_id", typeId) - .equals("type", `type`) - .anormSql().executeUpdate()(c) - () - } - - def deleteAllByTypeIdAndTypes(deletedBy: UUID, typeId: String, types: Seq[String]): Unit = { - db.withConnection { c => - deleteAllByTypeIdAndTypes(c, deletedBy, typeId, types) - } - } - - def deleteAllByTypeIdAndTypes(c: Connection, deletedBy: UUID, typeId: String, types: Seq[String]): Unit = { - setJournalDeletedByUserId(c, deletedBy) - Query("delete from tasks") - .equals("type_id", typeId) - .in("type", types) - .anormSql().executeUpdate()(c) - () - } - - def setJournalDeletedByUserId(c: Connection, deletedBy: UUID): Unit = { - Query(s"SET journal.deleted_by_user_id = '${deletedBy}'").anormSql().executeUpdate()(c) - () - } - -} \ No newline at end of file diff --git a/api/app/processor/TaskProcessor.scala b/api/app/processor/TaskProcessor.scala index 230441c9a..3c52af136 100644 --- a/api/app/processor/TaskProcessor.scala +++ b/api/app/processor/TaskProcessor.scala @@ -89,11 +89,12 @@ abstract class BaseTaskProcessor( args.lockUtil.lock(s"tasks:$typ") { _ => args.dao .findAll( - `type` = Some(typ.toString), - nextAttemptAtLessThanOrEquals = Some(DateTime.now), limit = Some(Limit), orderBy = Some(OrderBy("num_attempts, next_attempt_at")) - ) + ) { q => + q.equals("type", typ.toString) + .and("next_attempt_at <= now()") + } .foreach(processRecordSafe) } } @@ -155,7 +156,7 @@ abstract class BaseTaskProcessor( } final protected def insertIfNew(c: Connection, form: TaskForm): Unit = { - if (args.dao.findByTypeIdAndTypeWithConnection(c, form.typeId, form.`type`).isEmpty) { + if (args.dao.findByTypeIdAndTypeWithConnection(c, (form.typeId, form.`type`)).isEmpty) { args.dao.upsertByTypeIdAndType(c, Constants.DefaultUserGuid, form) } } diff --git a/api/app/util/IdGenerator.scala b/api/app/util/IdGenerator.scala deleted file mode 100644 index 0f2115e5b..000000000 --- a/api/app/util/IdGenerator.scala +++ /dev/null @@ -1,11 +0,0 @@ -package util - -import java.util.UUID - -case class IdGenerator() { - - def randomId(): String = { - UUID.randomUUID().toString - } - -} diff --git a/api/app/util/SessionHelper.scala b/api/app/util/SessionHelper.scala index 51847cced..aa0d57d02 100644 --- a/api/app/util/SessionHelper.scala +++ b/api/app/util/SessionHelper.scala @@ -21,13 +21,7 @@ class SessionHelper @Inject() ( 
_root_.db.generated.SessionForm( id = id, userGuid = u.guid, - expiresAt = ts.plusHours(DefaultSessionExpirationHours), - createdAt = ts, - createdByGuid = u.guid, - updatedAt = ts, - updatedByGuid = u.guid, - deletedAt = None, - deletedByGuid = None + expiresAt = ts.plusHours(DefaultSessionExpirationHours) ) ) diff --git a/build.sbt b/build.sbt index cf84f70ec..f587163ae 100644 --- a/build.sbt +++ b/build.sbt @@ -80,10 +80,14 @@ lazy val generated = project .in(file("generated")) .enablePlugins(PlayScala) .settings(commonSettings*) + .settings(resolversSettings) .settings( scalacOptions ++= Seq("-deprecation:false"), libraryDependencies ++= Seq( ws, + jdbc, + "com.github.mbryzek" % "lib-query" % "0.0.5", + "com.github.mbryzek" % "lib-util" % "0.0.7", "joda-time" % "joda-time" % "2.12.7", "org.playframework.anorm" %% "anorm-postgres" % "2.7.0", "org.postgresql" % "postgresql" % "42.7.3", diff --git a/dao/spec/psql-apibuilder.json b/dao/spec/psql-apibuilder.json index 52708ca52..637842fbf 100644 --- a/dao/spec/psql-apibuilder.json +++ b/dao/spec/psql-apibuilder.json @@ -1,84 +1,121 @@ { "name": "psql-apibuilder", "attributes": [ - { "name": "scala", "value": { "version": "3.4" } } + { + "name": "scala", + "value": { + "version": "3.4", + "package": "db.generated", + "user_class": "java.util.UUID" + } + }, + { + "name": "psql", + "value": { + "pkey": "id", + "hash_code": {}, + "audit": { + "created": { + "at": { "type": "date-time-iso8601" } + }, + "updated": { + "at": { "type": "date-time-iso8601" }, + "by": { "name": "updated_by_guid", "type": "string" } + } + } + } + } ], "models": { - "generator_invocation": { + "task": { "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "key", - "type": "string" - }, - { - "name": "organization_key", - "type": "string", - "required": false - }, - { - "name": "application_key", - "type": "string", - "required": false - } + { "name": "id", "type": "string" }, + { "name": "type", "type": "string" }, + { "name": "type_id", "type": "string" }, + { "name": "organization_guid", "type": "uuid", "required": false }, + { "name": "num_attempts", "type": "integer", "minimum": 0, "default": 0 }, + { "name": "next_attempt_at", "type": "date-time-iso8601" }, + { "name": "errors", "type": "[string]", "required": false }, + { "name": "stacktrace", "type": "string", "required": false }, + { "name": "data", "type": "json" } ], "attributes": [ { - "name": "scala", + "name": "psql", "value": { - "package": "db.generated", - "dao_user_class": "java.util.UUID", - "id_generator": { - "class": "com.mbryzek.util.IdGenerator", - "prefix": "gni" - } + "on_conflict": { + "fields": ["type_id", "type"] + }, + "indexes": [ + { "fields": ["num_attempts", "next_attempt_at"] } + ] } - }, + } + ] + }, + + "session": { + "fields": [ + { "name": "id", "type": "string" }, + { "name": "user_guid", "type": "uuid" }, + { "name": "expires_at", "type": "date-time-iso8601" } + ], + "attributes": [ { "name": "psql", "value": { - "pkey": "id" + "indexes": [ + { "fields": ["user_guid"] } + ], + "audit": { + "created": { + "at": { "type": "date-time-iso8601" }, + "by": { "name": "created_by_guid", "type": "uuid" } + }, + "updated": { + "at": { "type": "date-time-iso8601" }, + "by": { "name": "updated_by_guid", "type": "uuid" } + }, + "deleted": { + "at": { "type": "date-time-iso8601", "required": false }, + "by": { "name": "deleted_by_guid", "type": "uuid", "required": false } + } + } } } ] }, - "task": { + "generator_invocation": { "fields": [ { "name": "id", "type": "string" }, - 
{ "name": "type", "type": "string" }, - { "name": "type_id", "type": "string" }, - { "name": "organization_guid", "type": "uuid", "required": false }, - { "name": "num_attempts", "type": "integer", "minimum": 0, "default": 0 }, - { "name": "next_attempt_at", "type": "date-time-iso8601" }, - { "name": "errors", "type": "[string]", "required": false }, - { "name": "stacktrace", "type": "string", "required": false }, - { "name": "data", "type": "json" } + { "name": "key", "type": "string" }, + { "name": "organization_key", "type": "string", "required": false }, + { "name": "application_key", "type": "string", "required": false } ], "attributes": [ { "name": "scala", "value": { - "package": "db.generated", - "dao_user_class": "java.util.UUID", - "order_by": { "optional": true } + "id_generator": { + "class": "com.mbryzek.util.IdGenerator", + "prefix": "gni" + } } }, { "name": "psql", "value": { - "pkey": "id", - "authorization": { "type": "disabled" }, - "on_conflict": { - "fields": ["type_id", "type"] - }, - "indexes": [ - { "fields": ["num_attempts", "next_attempt_at"] } - ] + "audit": { + "created": { + "at": { "type": "date-time-iso8601" } + }, + "updated": { + "at": { "type": "date-time-iso8601" }, + "by": { "name": "updated_by_guid", "type": "string" } + } + } } } ] diff --git a/generated/app/ApicollectiveApibuilderSpecV0Client.scala b/generated/app/ApicollectiveApibuilderSpecV0Client.scala index 461b713fa..a38a4435e 100644 --- a/generated/app/ApicollectiveApibuilderSpecV0Client.scala +++ b/generated/app/ApicollectiveApibuilderSpecV0Client.scala @@ -1,7 +1,7 @@ /** * Generated by API Builder - https://www.apibuilder.io * Service version: 0.16.53 - * User agent: apibuilder app.apibuilder.io/apicollective/apibuilder-spec/latest/play_2_9_scala_3_client + * User agent: apibuilder localhost 9000/apicollective/apibuilder-spec/latest/play_2_9_scala_3_client */ package io.apibuilder.spec.v0.models { @@ -1416,7 +1416,17 @@ package io.apibuilder.spec.v0.models { obj match { case x: io.apibuilder.spec.v0.models.ResponseCodeInt => play.api.libs.json.Json.obj("integer" -> play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsNumber(x.value))) case x: io.apibuilder.spec.v0.models.ResponseCodeOption => play.api.libs.json.Json.obj("response_code_option" -> play.api.libs.json.JsString(x.toString)) - case x: io.apibuilder.spec.v0.models.ResponseCodeUndefinedType => sys.error(s"The type[io.apibuilder.spec.v0.models.ResponseCodeUndefinedType] should never be serialized") + case x: io.apibuilder.spec.v0.models.ResponseCodeUndefinedType => { + scala.util.Try { + // If we received a JSON object - echo it back. 
This is a workaround for a bug in + // serialization for unions w/out discriminators where they sometimes have the + // type wrapper and sometimes do not + play.api.libs.json.Json.parse(x.description).asInstanceOf[play.api.libs.json.JsObject] + } match { + case scala.util.Success(o) => o + case scala.util.Failure(_) => sys.error("The type[io.apibuilder.spec.v0.models.ResponseCodeUndefinedType] should never be serialized") + } + } } } @@ -1564,7 +1574,7 @@ package io.apibuilder.spec.v0 { object Constants { val Namespace = "io.apibuilder.spec.v0" - val UserAgent = "apibuilder app.apibuilder.io/apicollective/apibuilder-spec/latest/play_2_9_scala_3_client" + val UserAgent = "apibuilder localhost 9000/apicollective/apibuilder-spec/latest/play_2_9_scala_3_client" val Version = "0.16.53" val VersionMajor = 0 diff --git a/generated/app/db/GeneratorInvocationsDao.scala b/generated/app/db/GeneratorInvocationsDao.scala new file mode 100644 index 000000000..018ded529 --- /dev/null +++ b/generated/app/db/GeneratorInvocationsDao.scala @@ -0,0 +1,432 @@ +package db.generated + +case class GeneratorInvocation( + id: String, + key: String, + organizationKey: Option[String], + applicationKey: Option[String], + createdAt: org.joda.time.DateTime, + updatedAt: org.joda.time.DateTime, + updatedByGuid: String +) { + def form: GeneratorInvocationForm = { + GeneratorInvocationForm( + key = key, + organizationKey = organizationKey, + applicationKey = applicationKey, + ) + } +} + +case class GeneratorInvocationForm( + key: String, + organizationKey: Option[String], + applicationKey: Option[String] +) + +case object GeneratorInvocationsTable { + val SchemaName: String = "public" + + val TableName: String = "generator_invocations" + + val QualifiedName: String = "public.generator_invocations" + + sealed trait Column { + def name: String + } + + object Columns { + case object Id extends Column { + override val name: String = "id" + } + + case object Key extends Column { + override val name: String = "key" + } + + case object OrganizationKey extends Column { + override val name: String = "organization_key" + } + + case object ApplicationKey extends Column { + override val name: String = "application_key" + } + + case object CreatedAt extends Column { + override val name: String = "created_at" + } + + case object UpdatedAt extends Column { + override val name: String = "updated_at" + } + + case object UpdatedByGuid extends Column { + override val name: String = "updated_by_guid" + } + + case object HashCode extends Column { + override val name: String = "hash_code" + } + + val all: List[Column] = List(Id, Key, OrganizationKey, ApplicationKey, CreatedAt, UpdatedAt, UpdatedByGuid, HashCode) + } +} + +trait BaseGeneratorInvocationsDao { + import anorm.* + + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + def db: play.api.db.Database + + private val BaseQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | select id, + | key, + | organization_key, + | application_key, + | created_at, + | updated_at, + | updated_by_guid, + | hash_code + | from public.generator_invocations + |""".stripMargin.stripTrailing + ) + } + + def findAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[GeneratorInvocation] = { + db.withConnection { c => + findAllWithConnection(c, id, ids, limit, 
offset, orderBy) + } + } + + def findAllWithConnection( + c: java.sql.Connection, + id: Option[String] = None, + ids: Option[Seq[String]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[GeneratorInvocation] = { + customQueryModifier(BaseQuery) + .equals("generator_invocations.id", id) + .optionalIn("generator_invocations.id", ids) + .optionalLimit(limit) + .offset(offset) + .orderBy(orderBy.flatMap(_.sql)) + .as(parser.*)(c) + } + + def iterateAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + pageSize: Long = 1000 + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Iterator[GeneratorInvocation] = { + assert(pageSize > 0, "pageSize must be > 0") + + def iterate(lastValue: Option[GeneratorInvocation]): Iterator[GeneratorInvocation] = { + val page: Seq[GeneratorInvocation] = db.withConnection { c => + customQueryModifier(BaseQuery) + .equals("generator_invocations.id", id) + .optionalIn("generator_invocations.id", ids) + .greaterThan("generator_invocations.id", lastValue.map(_.id)) + .orderBy("generator_invocations.id") + .limit(pageSize) + .as(parser.*)(c) + } + if (page.length >= pageSize) { + page.iterator ++ iterate(page.lastOption) + } else { + page.iterator + } + } + + iterate(None) + } + + def findById(id: String): Option[GeneratorInvocation] = { + db.withConnection { c => + findByIdWithConnection(c, id) + } + } + + def findByIdWithConnection( + c: java.sql.Connection, + id: String + ): Option[GeneratorInvocation] = { + findAllWithConnection( + c = c, + id = Some(id), + limit = Some(1) + ).headOption + } + + private val parser: anorm.RowParser[GeneratorInvocation] = { + anorm.SqlParser.str("id") ~ + anorm.SqlParser.str("key") ~ + anorm.SqlParser.str("organization_key").? ~ + anorm.SqlParser.str("application_key").? 
~ + anorm.SqlParser.get[org.joda.time.DateTime]("created_at") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("updated_at") ~ + anorm.SqlParser.str("updated_by_guid") ~ + anorm.SqlParser.long("hash_code") map { case id ~ key ~ organizationKey ~ applicationKey ~ createdAt ~ updatedAt ~ updatedByGuid ~ hashCode => + GeneratorInvocation( + id = id, + key = key, + organizationKey = organizationKey, + applicationKey = applicationKey, + createdAt = createdAt, + updatedAt = updatedAt, + updatedByGuid = updatedByGuid + ) + } + } +} + +class GeneratorInvocationsDao @javax.inject.Inject() (override val db: play.api.db.Database) extends BaseGeneratorInvocationsDao { + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + private val idGenerator: com.mbryzek.util.IdGenerator = { + com.mbryzek.util.IdGenerator("gni") + } + + def randomId: String = { + idGenerator.randomId() + } + + private val InsertQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | insert into public.generator_invocations + | (id, key, organization_key, application_key, created_at, updated_at, updated_by_guid, hash_code) + | values + | ({id}, {key}, {organization_key}, {application_key}, {created_at}::timestamptz, {updated_at}::timestamptz, {updated_by_guid}, {hash_code}::bigint) + """.stripMargin) + } + + private val UpdateQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | update public.generator_invocations + | set key = {key}, + | organization_key = {organization_key}, + | application_key = {application_key}, + | updated_at = {updated_at}::timestamptz, + | updated_by_guid = {updated_by_guid}, + | hash_code = {hash_code}::bigint + | where id = {id} and generator_invocations.hash_code != {hash_code}::bigint + """.stripMargin) + } + + private val DeleteQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query("delete from public.generator_invocations") + } + + def insert( + user: java.util.UUID, + form: GeneratorInvocationForm + ): String = { + db.withConnection { c => + insert(c, user, form) + } + } + + def insert( + c: java.sql.Connection, + user: java.util.UUID, + form: GeneratorInvocationForm + ): String = { + val id = randomId + bindQuery(InsertQuery, user, form) + .bind("created_at", org.joda.time.DateTime.now) + .bind("id", id) + .execute(c) + id + } + + def insertBatch( + user: java.util.UUID, + forms: Seq[GeneratorInvocationForm] + ): Seq[String] = { + db.withConnection { c => + insertBatch(c, user, forms) + } + } + + def insertBatch( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[GeneratorInvocationForm] + ): Seq[String] = { + forms.map { f => + val id = randomId + (id, Seq(anorm.NamedParameter("created_at", org.joda.time.DateTime.now)) ++ toNamedParameter(user, id, f)) + }.toList match { + case Nil => Nil + case one :: rest => { + anorm.BatchSql(InsertQuery.sql(), one._2, rest.map(_._2)*).execute()(c) + Seq(one._1) ++ rest.map(_._1) + } + } + } + + def update( + user: java.util.UUID, + generatorInvocation: GeneratorInvocation, + form: GeneratorInvocationForm + ): Unit = { + db.withConnection { c => + update(c, user, generatorInvocation, form) + } + } + + def update( + c: java.sql.Connection, + user: java.util.UUID, + generatorInvocation: GeneratorInvocation, + form: GeneratorInvocationForm + ): Unit = { + updateById( + c = c, + user = user, + id = generatorInvocation.id, + form = form + ) + } + + def updateById( + user: java.util.UUID, + id: String, + form: GeneratorInvocationForm + ): Unit = { + db.withConnection { c => + updateById(c, user, id, 
form) + } + } + + def updateById( + c: java.sql.Connection, + user: java.util.UUID, + id: String, + form: GeneratorInvocationForm + ): Unit = { + bindQuery(UpdateQuery, user, form) + .bind("id", id) + .bind("updated_by_guid", user) + .execute(c) + () + } + + def updateBatch( + user: java.util.UUID, + forms: Seq[(String, GeneratorInvocationForm)] + ): Unit = { + db.withConnection { c => + updateBatch(c, user, forms) + } + } + + def updateBatch( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[(String, GeneratorInvocationForm)] + ): Unit = { + forms.map { case (id, f) => toNamedParameter(user, id, f) }.toList match { + case Nil => // no-op + case first :: rest => anorm.BatchSql(UpdateQuery.sql(), first, rest*).execute()(c) + } + } + + def delete( + user: java.util.UUID, + generatorInvocation: GeneratorInvocation + ): Unit = { + db.withConnection { c => + delete(c, user, generatorInvocation) + } + } + + def delete( + c: java.sql.Connection, + user: java.util.UUID, + generatorInvocation: GeneratorInvocation + ): Unit = { + deleteById( + c = c, + user = user, + id = generatorInvocation.id + ) + } + + def deleteById( + user: java.util.UUID, + id: String + ): Unit = { + db.withConnection { c => + deleteById(c, user, id) + } + } + + def deleteById( + c: java.sql.Connection, + user: java.util.UUID, + id: String + ): Unit = { + DeleteQuery.equals("id", id).execute(c) + } + + def deleteAllByIds( + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + db.withConnection { c => + deleteAllByIds(c, user, ids) + } + } + + def deleteAllByIds( + c: java.sql.Connection, + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + DeleteQuery.in("id", ids).execute(c) + } + + private def bindQuery( + query: io.flow.postgresql.Query, + user: java.util.UUID, + form: GeneratorInvocationForm + ): io.flow.postgresql.Query = { + query + .bind("key", form.key) + .bind("organization_key", form.organizationKey) + .bind("application_key", form.applicationKey) + .bind("updated_at", org.joda.time.DateTime.now) + .bind("updated_by_guid", user) + .bind("hash_code", form.hashCode()) + } + + private def toNamedParameter( + user: java.util.UUID, + id: String, + form: GeneratorInvocationForm + ): Seq[anorm.NamedParameter] = { + Seq( + anorm.NamedParameter("id", id), + anorm.NamedParameter("key", form.key), + anorm.NamedParameter("organization_key", form.organizationKey), + anorm.NamedParameter("application_key", form.applicationKey), + anorm.NamedParameter("updated_at", org.joda.time.DateTime.now), + anorm.NamedParameter("updated_by_guid", user), + anorm.NamedParameter("hash_code", form.hashCode()) + ) + } +} \ No newline at end of file diff --git a/generated/app/db/SessionsDao.scala b/generated/app/db/SessionsDao.scala new file mode 100644 index 000000000..e4bca6e8d --- /dev/null +++ b/generated/app/db/SessionsDao.scala @@ -0,0 +1,505 @@ +package db.generated + +case class Session( + id: String, + userGuid: java.util.UUID, + expiresAt: org.joda.time.DateTime, + createdAt: org.joda.time.DateTime, + createdByGuid: java.util.UUID, + updatedAt: org.joda.time.DateTime, + updatedByGuid: java.util.UUID, + deletedAt: Option[org.joda.time.DateTime], + deletedByGuid: Option[java.util.UUID] +) { + def form: SessionForm = { + SessionForm( + id = id, + userGuid = userGuid, + expiresAt = expiresAt, + ) + } +} + +case class SessionForm( + id: String, + userGuid: java.util.UUID, + expiresAt: org.joda.time.DateTime +) + +case object SessionsTable { + val SchemaName: String = "public" + + val TableName: String = "sessions" + + 
val QualifiedName: String = "public.sessions" + + sealed trait Column { + def name: String + } + + object Columns { + case object Id extends Column { + override val name: String = "id" + } + + case object UserGuid extends Column { + override val name: String = "user_guid" + } + + case object ExpiresAt extends Column { + override val name: String = "expires_at" + } + + case object CreatedAt extends Column { + override val name: String = "created_at" + } + + case object CreatedByGuid extends Column { + override val name: String = "created_by_guid" + } + + case object UpdatedAt extends Column { + override val name: String = "updated_at" + } + + case object UpdatedByGuid extends Column { + override val name: String = "updated_by_guid" + } + + case object DeletedAt extends Column { + override val name: String = "deleted_at" + } + + case object DeletedByGuid extends Column { + override val name: String = "deleted_by_guid" + } + + case object HashCode extends Column { + override val name: String = "hash_code" + } + + val all: List[Column] = List(Id, UserGuid, ExpiresAt, CreatedAt, CreatedByGuid, UpdatedAt, UpdatedByGuid, DeletedAt, DeletedByGuid, HashCode) + } +} + +trait BaseSessionsDao { + import anorm.* + + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + def db: play.api.db.Database + + private val BaseQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | select id, + | user_guid::text, + | expires_at, + | created_at, + | created_by_guid::text, + | updated_at, + | updated_by_guid::text, + | deleted_at, + | deleted_by_guid::text, + | hash_code + | from public.sessions + |""".stripMargin.stripTrailing + ) + } + + def findAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + userGuid: Option[java.util.UUID] = None, + userGuids: Option[Seq[java.util.UUID]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[Session] = { + db.withConnection { c => + findAllWithConnection(c, id, ids, userGuid, userGuids, limit, offset, orderBy) + } + } + + def findAllWithConnection( + c: java.sql.Connection, + id: Option[String] = None, + ids: Option[Seq[String]] = None, + userGuid: Option[java.util.UUID] = None, + userGuids: Option[Seq[java.util.UUID]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[Session] = { + customQueryModifier(BaseQuery) + .equals("sessions.id", id) + .optionalIn("sessions.id", ids) + .equals("sessions.user_guid", userGuid) + .optionalIn("sessions.user_guid", userGuids) + .optionalLimit(limit) + .offset(offset) + .orderBy(orderBy.flatMap(_.sql)) + .as(parser.*)(c) + } + + def iterateAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + userGuid: Option[java.util.UUID] = None, + userGuids: Option[Seq[java.util.UUID]] = None, + pageSize: Long = 1000 + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Iterator[Session] = { + assert(pageSize > 0, "pageSize must be > 0") + + def iterate(lastValue: Option[Session]): Iterator[Session] = { + val page: Seq[Session] = db.withConnection { c => + customQueryModifier(BaseQuery) + .equals("sessions.id", id) + .optionalIn("sessions.id", ids) + .equals("sessions.user_guid", userGuid) + .optionalIn("sessions.user_guid", 
userGuids) + .greaterThan("sessions.id", lastValue.map(_.id)) + .orderBy("sessions.id") + .limit(pageSize) + .as(parser.*)(c) + } + if (page.length >= pageSize) { + page.iterator ++ iterate(page.lastOption) + } else { + page.iterator + } + } + + iterate(None) + } + + def findById(id: String): Option[Session] = { + db.withConnection { c => + findByIdWithConnection(c, id) + } + } + + def findByIdWithConnection( + c: java.sql.Connection, + id: String + ): Option[Session] = { + findAllWithConnection( + c = c, + id = Some(id), + limit = Some(1) + ).headOption + } + + def findAllByUserGuid(userGuid: java.util.UUID): Seq[Session] = { + db.withConnection { c => + findAllByUserGuidWithConnection(c, userGuid) + } + } + + def findAllByUserGuidWithConnection( + c: java.sql.Connection, + userGuid: java.util.UUID + ): Seq[Session] = { + findAllWithConnection( + c = c, + userGuid = Some(userGuid), + limit = None + ) + } + + private val parser: anorm.RowParser[Session] = { + anorm.SqlParser.str("id") ~ + anorm.SqlParser.str("user_guid") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("expires_at") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("created_at") ~ + anorm.SqlParser.str("created_by_guid") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("updated_at") ~ + anorm.SqlParser.str("updated_by_guid") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("deleted_at").? ~ + anorm.SqlParser.str("deleted_by_guid").? ~ + anorm.SqlParser.long("hash_code") map { case id ~ userGuid ~ expiresAt ~ createdAt ~ createdByGuid ~ updatedAt ~ updatedByGuid ~ deletedAt ~ deletedByGuid ~ hashCode => + Session( + id = id, + userGuid = java.util.UUID.fromString(userGuid), + expiresAt = expiresAt, + createdAt = createdAt, + createdByGuid = java.util.UUID.fromString(createdByGuid), + updatedAt = updatedAt, + updatedByGuid = java.util.UUID.fromString(updatedByGuid), + deletedAt = deletedAt, + deletedByGuid = deletedByGuid.map { v => java.util.UUID.fromString(v) } + ) + } + } +} + +class SessionsDao @javax.inject.Inject() (override val db: play.api.db.Database) extends BaseSessionsDao { + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + private val InsertQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | insert into public.sessions + | (id, user_guid, expires_at, created_at, created_by_guid, updated_at, updated_by_guid, hash_code) + | values + | ({id}, {user_guid}::uuid, {expires_at}::timestamptz, {created_at}::timestamptz, {created_by_guid}::uuid, {updated_at}::timestamptz, {updated_by_guid}::uuid, {hash_code}::bigint) + """.stripMargin) + } + + private val UpdateQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | update public.sessions + | set user_guid = {user_guid}::uuid, + | expires_at = {expires_at}::timestamptz, + | updated_at = {updated_at}::timestamptz, + | updated_by_guid = {updated_by_guid}::uuid, + | hash_code = {hash_code}::bigint + | where id = {id} and sessions.hash_code != {hash_code}::bigint + """.stripMargin) + } + + private val DeleteQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query("update public.sessions set deleted_at = {deleted_at}::timestamptz, deleted_by_guid = {deleted_by_guid}::uuid") + } + + def insert( + user: java.util.UUID, + form: SessionForm + ): Unit = { + db.withConnection { c => + insert(c, user, form) + } + } + + def insert( + c: java.sql.Connection, + user: java.util.UUID, + form: SessionForm + ): Unit = { + bindQuery(InsertQuery, user, form) + .bind("created_at", org.joda.time.DateTime.now) + .bind("created_by_guid", 
user) + .execute(c) + } + + def insertBatch( + user: java.util.UUID, + forms: Seq[SessionForm] + ): Seq[Unit] = { + db.withConnection { c => + insertBatch(c, user, forms) + } + } + + def insertBatch( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[SessionForm] + ): Seq[Unit] = { + forms.map { f => Seq(anorm.NamedParameter("created_at", org.joda.time.DateTime.now)) ++ toNamedParameter(user, f) }.toList match { + case Nil => Nil + case one :: rest => { + anorm.BatchSql(InsertQuery.sql(), one, rest*).execute()(c) + (Seq(one) ++ rest).map { _ => () } + } + } + } + + def update( + user: java.util.UUID, + session: Session, + form: SessionForm + ): Unit = { + db.withConnection { c => + update(c, user, session, form) + } + } + + def update( + c: java.sql.Connection, + user: java.util.UUID, + session: Session, + form: SessionForm + ): Unit = { + updateById( + c = c, + user = user, + id = session.id, + form = form + ) + } + + def updateById( + user: java.util.UUID, + id: String, + form: SessionForm + ): Unit = { + db.withConnection { c => + updateById(c, user, id, form) + } + } + + def updateById( + c: java.sql.Connection, + user: java.util.UUID, + id: String, + form: SessionForm + ): Unit = { + bindQuery(UpdateQuery, user, form) + .bind("id", id) + .bind("updated_by_guid", user) + .execute(c) + () + } + + def updateBatch( + user: java.util.UUID, + forms: Seq[SessionForm] + ): Unit = { + db.withConnection { c => + updateBatch(c, user, forms) + } + } + + def updateBatch( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[SessionForm] + ): Unit = { + forms.map { f => toNamedParameter(user, f) }.toList match { + case Nil => // no-op + case first :: rest => anorm.BatchSql(UpdateQuery.sql(), first, rest*).execute()(c) + } + } + + def delete( + user: java.util.UUID, + session: Session + ): Unit = { + db.withConnection { c => + delete(c, user, session) + } + } + + def delete( + c: java.sql.Connection, + user: java.util.UUID, + session: Session + ): Unit = { + deleteById( + c = c, + user = user, + id = session.id + ) + } + + def deleteById( + user: java.util.UUID, + id: String + ): Unit = { + db.withConnection { c => + deleteById(c, user, id) + } + } + + def deleteById( + c: java.sql.Connection, + user: java.util.UUID, + id: String + ): Unit = { + DeleteQuery.equals("id", id) + .bind("deleted_at", org.joda.time.DateTime.now) + .bind("deleted_by_guid", user) + .execute(c) + } + + def deleteAllByIds( + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + db.withConnection { c => + deleteAllByIds(c, user, ids) + } + } + + def deleteAllByIds( + c: java.sql.Connection, + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + DeleteQuery.in("id", ids) + .bind("deleted_at", org.joda.time.DateTime.now) + .bind("deleted_by_guid", user) + .execute(c) + } + + def deleteAllByUserGuid( + user: java.util.UUID, + userGuid: java.util.UUID + ): Unit = { + db.withConnection { c => + deleteAllByUserGuid(c, user, userGuid) + } + } + + def deleteAllByUserGuid( + c: java.sql.Connection, + user: java.util.UUID, + userGuid: java.util.UUID + ): Unit = { + DeleteQuery.equals("user_guid", userGuid) + .bind("deleted_at", org.joda.time.DateTime.now) + .bind("deleted_by_guid", user) + .execute(c) + } + + def deleteAllByUserGuids( + user: java.util.UUID, + userGuids: Seq[java.util.UUID] + ): Unit = { + db.withConnection { c => + deleteAllByUserGuids(c, user, userGuids) + } + } + + def deleteAllByUserGuids( + c: java.sql.Connection, + user: java.util.UUID, + userGuids: Seq[java.util.UUID] + ): Unit = { + 
DeleteQuery.in("user_guid", userGuids) + .bind("deleted_at", org.joda.time.DateTime.now) + .bind("deleted_by_guid", user) + .execute(c) + } + + private def bindQuery( + query: io.flow.postgresql.Query, + user: java.util.UUID, + form: SessionForm + ): io.flow.postgresql.Query = { + query + .bind("id", form.id) + .bind("user_guid", form.userGuid.toString) + .bind("expires_at", form.expiresAt) + .bind("updated_at", org.joda.time.DateTime.now) + .bind("updated_by_guid", user) + .bind("hash_code", form.hashCode()) + } + + private def toNamedParameter( + user: java.util.UUID, + form: SessionForm + ): Seq[anorm.NamedParameter] = { + Seq( + anorm.NamedParameter("id", form.id), + anorm.NamedParameter("user_guid", form.userGuid.toString), + anorm.NamedParameter("expires_at", form.expiresAt), + anorm.NamedParameter("updated_at", org.joda.time.DateTime.now), + anorm.NamedParameter("updated_by_guid", user.toString), + anorm.NamedParameter("hash_code", form.hashCode()) + ) + } +} \ No newline at end of file diff --git a/generated/app/db/TasksDao.scala b/generated/app/db/TasksDao.scala new file mode 100644 index 000000000..2cd74e8a2 --- /dev/null +++ b/generated/app/db/TasksDao.scala @@ -0,0 +1,740 @@ +package db.generated + +case class Task( + id: String, + `type`: String, + typeId: String, + organizationGuid: Option[java.util.UUID], + numAttempts: Int, + nextAttemptAt: org.joda.time.DateTime, + errors: Option[Seq[String]], + stacktrace: Option[String], + data: play.api.libs.json.JsValue, + createdAt: org.joda.time.DateTime, + updatedAt: org.joda.time.DateTime, + updatedByGuid: String +) { + def form: TaskForm = { + TaskForm( + id = id, + `type` = `type`, + typeId = typeId, + organizationGuid = organizationGuid, + numAttempts = numAttempts, + nextAttemptAt = nextAttemptAt, + errors = errors, + stacktrace = stacktrace, + data = data, + ) + } +} + +case class TaskForm( + id: String, + `type`: String, + typeId: String, + organizationGuid: Option[java.util.UUID], + numAttempts: Int, + nextAttemptAt: org.joda.time.DateTime, + errors: Option[Seq[String]], + stacktrace: Option[String], + data: play.api.libs.json.JsValue +) + +case object TasksTable { + val SchemaName: String = "public" + + val TableName: String = "tasks" + + val QualifiedName: String = "public.tasks" + + sealed trait Column { + def name: String + } + + object Columns { + case object Id extends Column { + override val name: String = "id" + } + + case object Type extends Column { + override val name: String = "type" + } + + case object TypeId extends Column { + override val name: String = "type_id" + } + + case object OrganizationGuid extends Column { + override val name: String = "organization_guid" + } + + case object NumAttempts extends Column { + override val name: String = "num_attempts" + } + + case object NextAttemptAt extends Column { + override val name: String = "next_attempt_at" + } + + case object Errors extends Column { + override val name: String = "errors" + } + + case object Stacktrace extends Column { + override val name: String = "stacktrace" + } + + case object Data extends Column { + override val name: String = "data" + } + + case object CreatedAt extends Column { + override val name: String = "created_at" + } + + case object UpdatedAt extends Column { + override val name: String = "updated_at" + } + + case object UpdatedByGuid extends Column { + override val name: String = "updated_by_guid" + } + + case object HashCode extends Column { + override val name: String = "hash_code" + } + + val all: List[Column] = List(Id, Type, 
TypeId, OrganizationGuid, NumAttempts, NextAttemptAt, Errors, Stacktrace, Data, CreatedAt, UpdatedAt, UpdatedByGuid, HashCode) + } +} + +trait BaseTasksDao { + import anorm.* + + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + def db: play.api.db.Database + + private val BaseQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | select id, + | type, + | type_id, + | organization_guid::text, + | num_attempts, + | next_attempt_at, + | errors::text, + | stacktrace, + | data::text, + | created_at, + | updated_at, + | updated_by_guid, + | hash_code + | from public.tasks + |""".stripMargin.stripTrailing + ) + } + + def findAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + typeId: Option[String] = None, + typeIds: Option[Seq[String]] = None, + typeIdAndType: Option[(String, String)] = None, + typeIdsAndTypes: Option[Seq[(String, String)]] = None, + numAttempts: Option[Int] = None, + numAttemptses: Option[Seq[Int]] = None, + numAttemptsAndNextAttemptAt: Option[(Int, org.joda.time.DateTime)] = None, + numAttemptsesAndNextAttemptAts: Option[Seq[(Int, org.joda.time.DateTime)]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[Task] = { + db.withConnection { c => + findAllWithConnection(c, id, ids, typeId, typeIds, typeIdAndType, typeIdsAndTypes, numAttempts, numAttemptses, numAttemptsAndNextAttemptAt, numAttemptsesAndNextAttemptAts, limit, offset, orderBy) + } + } + + def findAllWithConnection( + c: java.sql.Connection, + id: Option[String] = None, + ids: Option[Seq[String]] = None, + typeId: Option[String] = None, + typeIds: Option[Seq[String]] = None, + typeIdAndType: Option[(String, String)] = None, + typeIdsAndTypes: Option[Seq[(String, String)]] = None, + numAttempts: Option[Int] = None, + numAttemptses: Option[Seq[Int]] = None, + numAttemptsAndNextAttemptAt: Option[(Int, org.joda.time.DateTime)] = None, + numAttemptsesAndNextAttemptAts: Option[Seq[(Int, org.joda.time.DateTime)]] = None, + limit: Option[Long], + offset: Long = 0, + orderBy: Option[io.flow.postgresql.OrderBy] = None + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Seq[Task] = { + customQueryModifier(BaseQuery) + .equals("tasks.id", id) + .optionalIn("tasks.id", ids) + .equals("tasks.type_id", typeId) + .optionalIn("tasks.type_id", typeIds) + .optionalIn2(("tasks.type_id", "tasks.type"), typeIdAndType.map(Seq(_))) + .optionalIn2(("tasks.type_id", "tasks.type"), typeIdsAndTypes) + .equals("tasks.num_attempts", numAttempts) + .optionalIn("tasks.num_attempts", numAttemptses) + .optionalIn2(("tasks.num_attempts", "tasks.next_attempt_at"), numAttemptsAndNextAttemptAt.map(Seq(_))) + .optionalIn2(("tasks.num_attempts", "tasks.next_attempt_at"), numAttemptsesAndNextAttemptAts) + .optionalLimit(limit) + .offset(offset) + .orderBy(orderBy.flatMap(_.sql)) + .as(parser.*)(c) + } + + def iterateAll( + id: Option[String] = None, + ids: Option[Seq[String]] = None, + typeId: Option[String] = None, + typeIds: Option[Seq[String]] = None, + typeIdAndType: Option[(String, String)] = None, + typeIdsAndTypes: Option[Seq[(String, String)]] = None, + numAttempts: Option[Int] = None, + numAttemptses: Option[Seq[Int]] = None, + numAttemptsAndNextAttemptAt: Option[(Int, org.joda.time.DateTime)] = None, + numAttemptsesAndNextAttemptAts: Option[Seq[(Int, org.joda.time.DateTime)]] = None, + 
pageSize: Long = 1000 + )(implicit customQueryModifier: io.flow.postgresql.Query => io.flow.postgresql.Query = identity): Iterator[Task] = { + assert(pageSize > 0, "pageSize must be > 0") + + def iterate(lastValue: Option[Task]): Iterator[Task] = { + val page: Seq[Task] = db.withConnection { c => + customQueryModifier(BaseQuery) + .equals("tasks.id", id) + .optionalIn("tasks.id", ids) + .equals("tasks.type_id", typeId) + .optionalIn("tasks.type_id", typeIds) + .optionalIn2(("tasks.type_id", "tasks.type"), typeIdAndType.map(Seq(_))) + .optionalIn2(("tasks.type_id", "tasks.type"), typeIdsAndTypes) + .equals("tasks.num_attempts", numAttempts) + .optionalIn("tasks.num_attempts", numAttemptses) + .optionalIn2(("tasks.num_attempts", "tasks.next_attempt_at"), numAttemptsAndNextAttemptAt.map(Seq(_))) + .optionalIn2(("tasks.num_attempts", "tasks.next_attempt_at"), numAttemptsesAndNextAttemptAts) + .greaterThan("tasks.id", lastValue.map(_.id)) + .orderBy("tasks.id") + .limit(pageSize) + .as(parser.*)(c) + } + if (page.length >= pageSize) { + page.iterator ++ iterate(page.lastOption) + } else { + page.iterator + } + } + + iterate(None) + } + + def findById(id: String): Option[Task] = { + db.withConnection { c => + findByIdWithConnection(c, id) + } + } + + def findByIdWithConnection( + c: java.sql.Connection, + id: String + ): Option[Task] = { + findAllWithConnection( + c = c, + id = Some(id), + limit = Some(1) + ).headOption + } + + def findAllByTypeId(typeId: String): Seq[Task] = { + db.withConnection { c => + findAllByTypeIdWithConnection(c, typeId) + } + } + + def findAllByTypeIdWithConnection( + c: java.sql.Connection, + typeId: String + ): Seq[Task] = { + findAllWithConnection( + c = c, + typeId = Some(typeId), + limit = None + ) + } + + def findByTypeIdAndType(typeIdAndType: (String, String)): Option[Task] = { + db.withConnection { c => + findByTypeIdAndTypeWithConnection(c, typeIdAndType) + } + } + + def findByTypeIdAndTypeWithConnection( + c: java.sql.Connection, + typeIdAndType: (String, String) + ): Option[Task] = { + findAllWithConnection( + c = c, + typeIdAndType = Some(typeIdAndType), + limit = Some(1) + ).headOption + } + + def findAllByNumAttempts(numAttempts: Int): Seq[Task] = { + db.withConnection { c => + findAllByNumAttemptsWithConnection(c, numAttempts) + } + } + + def findAllByNumAttemptsWithConnection( + c: java.sql.Connection, + numAttempts: Int + ): Seq[Task] = { + findAllWithConnection( + c = c, + numAttempts = Some(numAttempts), + limit = None + ) + } + + def findAllByNumAttemptsAndNextAttemptAt(numAttemptsAndNextAttemptAt: (Int, org.joda.time.DateTime)): Seq[Task] = { + db.withConnection { c => + findAllByNumAttemptsAndNextAttemptAtWithConnection(c, numAttemptsAndNextAttemptAt) + } + } + + def findAllByNumAttemptsAndNextAttemptAtWithConnection( + c: java.sql.Connection, + numAttemptsAndNextAttemptAt: (Int, org.joda.time.DateTime) + ): Seq[Task] = { + findAllWithConnection( + c = c, + numAttemptsAndNextAttemptAt = Some(numAttemptsAndNextAttemptAt), + limit = None + ) + } + + private val parser: anorm.RowParser[Task] = { + anorm.SqlParser.str("id") ~ + anorm.SqlParser.str("type") ~ + anorm.SqlParser.str("type_id") ~ + anorm.SqlParser.str("organization_guid").? ~ + anorm.SqlParser.int("num_attempts") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("next_attempt_at") ~ + anorm.SqlParser.str("errors").? ~ + anorm.SqlParser.str("stacktrace").? 
~ + anorm.SqlParser.str("data") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("created_at") ~ + anorm.SqlParser.get[org.joda.time.DateTime]("updated_at") ~ + anorm.SqlParser.str("updated_by_guid") ~ + anorm.SqlParser.long("hash_code") map { case id ~ type_ ~ typeId ~ organizationGuid ~ numAttempts ~ nextAttemptAt ~ errors ~ stacktrace ~ data ~ createdAt ~ updatedAt ~ updatedByGuid ~ hashCode => + Task( + id = id, + `type` = type_, + typeId = typeId, + organizationGuid = organizationGuid.map { v => java.util.UUID.fromString(v) }, + numAttempts = numAttempts, + nextAttemptAt = nextAttemptAt, + errors = errors.map { v => play.api.libs.json.Json.parse(v).asInstanceOf[play.api.libs.json.JsArray].value.toSeq.map(_.asInstanceOf[play.api.libs.json.JsString].value) }, + stacktrace = stacktrace, + data = play.api.libs.json.Json.parse(data), + createdAt = createdAt, + updatedAt = updatedAt, + updatedByGuid = updatedByGuid + ) + } + } +} + +class TasksDao @javax.inject.Inject() (override val db: play.api.db.Database) extends BaseTasksDao { + import anorm.JodaParameterMetaData.* + + import anorm.postgresql.* + + private val UpsertQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | insert into public.tasks + | (id, type, type_id, organization_guid, num_attempts, next_attempt_at, errors, stacktrace, data, created_at, updated_at, updated_by_guid, hash_code) + | values + | ({id}, {type}, {type_id}, {organization_guid}::uuid, {num_attempts}::integer, {next_attempt_at}::timestamptz, {errors}::json, {stacktrace}, {data}::json, {created_at}::timestamptz, {updated_at}::timestamptz, {updated_by_guid}, {hash_code}::bigint) + | on conflict(type_id, type) do update + | set organization_guid = {organization_guid}::uuid, + | num_attempts = {num_attempts}::integer, + | next_attempt_at = {next_attempt_at}::timestamptz, + | errors = {errors}::json, + | stacktrace = {stacktrace}, + | data = {data}::json, + | updated_at = {updated_at}::timestamptz, + | updated_by_guid = {updated_by_guid}, + | hash_code = {hash_code}::bigint + | where tasks.hash_code != {hash_code}::bigint + """.stripMargin) + } + + private val UpdateQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query(""" + | update public.tasks + | set type = {type}, + | type_id = {type_id}, + | organization_guid = {organization_guid}::uuid, + | num_attempts = {num_attempts}::integer, + | next_attempt_at = {next_attempt_at}::timestamptz, + | errors = {errors}::json, + | stacktrace = {stacktrace}, + | data = {data}::json, + | updated_at = {updated_at}::timestamptz, + | updated_by_guid = {updated_by_guid}, + | hash_code = {hash_code}::bigint + | where id = {id} and tasks.hash_code != {hash_code}::bigint + """.stripMargin) + } + + private val DeleteQuery: io.flow.postgresql.Query = { + io.flow.postgresql.Query("delete from public.tasks") + } + + def upsertByTypeIdAndType( + user: java.util.UUID, + form: TaskForm + ): Unit = { + db.withConnection { c => + upsertByTypeIdAndType(c, user, form) + } + } + + def upsertByTypeIdAndType( + c: java.sql.Connection, + user: java.util.UUID, + form: TaskForm + ): Unit = { + bindQuery(UpsertQuery, user, form) + .bind("created_at", org.joda.time.DateTime.now) + .execute(c) + } + + def upsertBatchByTypeIdAndType( + user: java.util.UUID, + forms: Seq[TaskForm] + ): Seq[Unit] = { + db.withConnection { c => + upsertBatchByTypeIdAndType(c, user, forms) + } + } + + def upsertBatchByTypeIdAndType( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[TaskForm] + ): Seq[Unit] = { + forms.map { f => 
Seq(anorm.NamedParameter("created_at", org.joda.time.DateTime.now)) ++ toNamedParameter(user, f) }.toList match { + case Nil => Nil + case one :: rest => { + anorm.BatchSql(UpsertQuery.sql(), one, rest*).execute()(c) + (Seq(one) ++ rest).map { _ => () } + } + } + } + + def update( + user: java.util.UUID, + task: Task, + form: TaskForm + ): Unit = { + db.withConnection { c => + update(c, user, task, form) + } + } + + def update( + c: java.sql.Connection, + user: java.util.UUID, + task: Task, + form: TaskForm + ): Unit = { + updateById( + c = c, + user = user, + id = task.id, + form = form + ) + } + + def updateById( + user: java.util.UUID, + id: String, + form: TaskForm + ): Unit = { + db.withConnection { c => + updateById(c, user, id, form) + } + } + + def updateById( + c: java.sql.Connection, + user: java.util.UUID, + id: String, + form: TaskForm + ): Unit = { + bindQuery(UpdateQuery, user, form) + .bind("id", id) + .bind("updated_by_guid", user) + .execute(c) + () + } + + def updateBatch( + user: java.util.UUID, + forms: Seq[TaskForm] + ): Unit = { + db.withConnection { c => + updateBatch(c, user, forms) + } + } + + def updateBatch( + c: java.sql.Connection, + user: java.util.UUID, + forms: Seq[TaskForm] + ): Unit = { + forms.map { f => toNamedParameter(user, f) }.toList match { + case Nil => // no-op + case first :: rest => anorm.BatchSql(UpdateQuery.sql(), first, rest*).execute()(c) + } + } + + def delete( + user: java.util.UUID, + task: Task + ): Unit = { + db.withConnection { c => + delete(c, user, task) + } + } + + def delete( + c: java.sql.Connection, + user: java.util.UUID, + task: Task + ): Unit = { + deleteById( + c = c, + user = user, + id = task.id + ) + } + + def deleteById( + user: java.util.UUID, + id: String + ): Unit = { + db.withConnection { c => + deleteById(c, user, id) + } + } + + def deleteById( + c: java.sql.Connection, + user: java.util.UUID, + id: String + ): Unit = { + DeleteQuery.equals("id", id).execute(c) + } + + def deleteAllByIds( + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + db.withConnection { c => + deleteAllByIds(c, user, ids) + } + } + + def deleteAllByIds( + c: java.sql.Connection, + user: java.util.UUID, + ids: Seq[String] + ): Unit = { + DeleteQuery.in("id", ids).execute(c) + } + + def deleteAllByTypeId( + user: java.util.UUID, + typeId: String + ): Unit = { + db.withConnection { c => + deleteAllByTypeId(c, user, typeId) + } + } + + def deleteAllByTypeId( + c: java.sql.Connection, + user: java.util.UUID, + typeId: String + ): Unit = { + DeleteQuery.equals("type_id", typeId).execute(c) + } + + def deleteAllByTypeIds( + user: java.util.UUID, + typeIds: Seq[String] + ): Unit = { + db.withConnection { c => + deleteAllByTypeIds(c, user, typeIds) + } + } + + def deleteAllByTypeIds( + c: java.sql.Connection, + user: java.util.UUID, + typeIds: Seq[String] + ): Unit = { + DeleteQuery.in("type_id", typeIds).execute(c) + } + + def deleteByTypeIdAndType( + user: java.util.UUID, + typeIdAndType: (String, String) + ): Unit = { + db.withConnection { c => + deleteByTypeIdAndType(c, user, typeIdAndType) + } + } + + def deleteByTypeIdAndType( + c: java.sql.Connection, + user: java.util.UUID, + typeIdAndType: (String, String) + ): Unit = { + DeleteQuery.in2(("type_id", "type"), Seq(typeIdAndType)).execute(c) + } + + def deleteAllByTypeIdsAndTypes( + user: java.util.UUID, + typeIdsAndTypes: Seq[(String, String)] + ): Unit = { + db.withConnection { c => + deleteAllByTypeIdsAndTypes(c, user, typeIdsAndTypes) + } + } + + def deleteAllByTypeIdsAndTypes( + c: 
java.sql.Connection, + user: java.util.UUID, + typeIdsAndTypes: Seq[(String, String)] + ): Unit = { + DeleteQuery.in2(("type_id", "type"), typeIdsAndTypes).execute(c) + } + + def deleteAllByNumAttempts( + user: java.util.UUID, + numAttempts: Int + ): Unit = { + db.withConnection { c => + deleteAllByNumAttempts(c, user, numAttempts) + } + } + + def deleteAllByNumAttempts( + c: java.sql.Connection, + user: java.util.UUID, + numAttempts: Int + ): Unit = { + DeleteQuery.equals("num_attempts", numAttempts).execute(c) + } + + def deleteAllByNumAttemptses( + user: java.util.UUID, + numAttemptses: Seq[Int] + ): Unit = { + db.withConnection { c => + deleteAllByNumAttemptses(c, user, numAttemptses) + } + } + + def deleteAllByNumAttemptses( + c: java.sql.Connection, + user: java.util.UUID, + numAttemptses: Seq[Int] + ): Unit = { + DeleteQuery.in("num_attempts", numAttemptses).execute(c) + } + + def deleteAllByNumAttemptsAndNextAttemptAt( + user: java.util.UUID, + numAttemptsAndNextAttemptAt: (Int, org.joda.time.DateTime) + ): Unit = { + db.withConnection { c => + deleteAllByNumAttemptsAndNextAttemptAt(c, user, numAttemptsAndNextAttemptAt) + } + } + + def deleteAllByNumAttemptsAndNextAttemptAt( + c: java.sql.Connection, + user: java.util.UUID, + numAttemptsAndNextAttemptAt: (Int, org.joda.time.DateTime) + ): Unit = { + DeleteQuery.in2(("num_attempts", "next_attempt_at"), Seq(numAttemptsAndNextAttemptAt)).execute(c) + } + + def deleteAllByNumAttemptsesAndNextAttemptAts( + user: java.util.UUID, + numAttemptsesAndNextAttemptAts: Seq[(Int, org.joda.time.DateTime)] + ): Unit = { + db.withConnection { c => + deleteAllByNumAttemptsesAndNextAttemptAts(c, user, numAttemptsesAndNextAttemptAts) + } + } + + def deleteAllByNumAttemptsesAndNextAttemptAts( + c: java.sql.Connection, + user: java.util.UUID, + numAttemptsesAndNextAttemptAts: Seq[(Int, org.joda.time.DateTime)] + ): Unit = { + DeleteQuery.in2(("num_attempts", "next_attempt_at"), numAttemptsesAndNextAttemptAts).execute(c) + } + + private def bindQuery( + query: io.flow.postgresql.Query, + user: java.util.UUID, + form: TaskForm + ): io.flow.postgresql.Query = { + query + .bind("id", form.id) + .bind("type", form.`type`) + .bind("type_id", form.typeId) + .bind("organization_guid", form.organizationGuid.map(_.toString)) + .bind("num_attempts", form.numAttempts) + .bind("next_attempt_at", form.nextAttemptAt) + .bind("errors", form.errors.map { v => play.api.libs.json.Json.toJson(v).toString }) + .bind("stacktrace", form.stacktrace) + .bind("data", play.api.libs.json.Json.toJson(form.data).toString) + .bind("updated_at", org.joda.time.DateTime.now) + .bind("updated_by_guid", user) + .bind("hash_code", form.hashCode()) + } + + private def toNamedParameter( + user: java.util.UUID, + form: TaskForm + ): Seq[anorm.NamedParameter] = { + Seq( + anorm.NamedParameter("id", form.id), + anorm.NamedParameter("type", form.`type`), + anorm.NamedParameter("type_id", form.typeId), + anorm.NamedParameter("organization_guid", form.organizationGuid.map(_.toString)), + anorm.NamedParameter("num_attempts", form.numAttempts), + anorm.NamedParameter("next_attempt_at", form.nextAttemptAt), + anorm.NamedParameter("errors", form.errors.map { v => play.api.libs.json.Json.toJson(v).toString }), + anorm.NamedParameter("stacktrace", form.stacktrace), + anorm.NamedParameter("data", play.api.libs.json.Json.toJson(form.data).toString), + anorm.NamedParameter("updated_at", org.joda.time.DateTime.now), + anorm.NamedParameter("updated_by_guid", user), + anorm.NamedParameter("hash_code", 
form.hashCode()) + ) + } +} \ No newline at end of file diff --git a/lib/src/main/scala/generated/ApicollectiveApibuilderSpecV0Models.scala b/lib/src/main/scala/generated/ApicollectiveApibuilderSpecV0Models.scala index 36564f7fb..c6f32fb6a 100644 --- a/lib/src/main/scala/generated/ApicollectiveApibuilderSpecV0Models.scala +++ b/lib/src/main/scala/generated/ApicollectiveApibuilderSpecV0Models.scala @@ -1,7 +1,7 @@ /** * Generated by API Builder - https://www.apibuilder.io - * Service version: 0.16.50 - * User agent: apibuilder app.apibuilder.io/apicollective/apibuilder-spec/latest/play_2_x_standalone_json + * Service version: 0.16.53 + * User agent: apibuilder localhost 9000/apicollective/apibuilder-spec/latest/play_2_x_standalone_json */ package io.apibuilder.spec.v0.models { @@ -1403,7 +1403,17 @@ package io.apibuilder.spec.v0.models { obj match { case x: io.apibuilder.spec.v0.models.ResponseCodeInt => play.api.libs.json.Json.obj("integer" -> play.api.libs.json.Json.obj("value" -> play.api.libs.json.JsNumber(x.value))) case x: io.apibuilder.spec.v0.models.ResponseCodeOption => play.api.libs.json.Json.obj("response_code_option" -> play.api.libs.json.JsString(x.toString)) - case x: io.apibuilder.spec.v0.models.ResponseCodeUndefinedType => sys.error(s"The type[io.apibuilder.spec.v0.models.ResponseCodeUndefinedType] should never be serialized") + case x: io.apibuilder.spec.v0.models.ResponseCodeUndefinedType => { + scala.util.Try { + // If we received a JSON object - echo it back. This is a workaround for a bug in + // serialization for unions w/out discriminators where they sometimes have the + // type wrapper and sometimes do not + play.api.libs.json.Json.parse(x.description).asInstanceOf[play.api.libs.json.JsObject] + } match { + case scala.util.Success(o) => o + case scala.util.Failure(_) => sys.error("The type[io.apibuilder.spec.v0.models.ResponseCodeUndefinedType] should never be serialized") + } + } } } implicit def jsonWritesApibuilderSpecResponseCode: play.api.libs.json.Writes[ResponseCode] = { diff --git a/script/lib/ask.rb b/script/lib/ask.rb new file mode 100644 index 000000000..d4f832c04 --- /dev/null +++ b/script/lib/ask.rb @@ -0,0 +1,30 @@ +class Ask + + TRUE = ['y', 'yes'] + FALSE = ['n', 'no'] + + def Ask.for_string(msg) + value = "" + while value.empty? + puts msg + value = STDIN.gets + value.strip! + end + value + end + + def Ask.for_boolean(msg) + result = nil + while result.nil? + value = Ask.for_string(msg).downcase + if TRUE.include?(value) + result = true + elsif FALSE.include?(value) + result = false + end + end + result + end + +end + diff --git a/script/lib/common.rb b/script/lib/common.rb new file mode 100755 index 000000000..1288bc265 --- /dev/null +++ b/script/lib/common.rb @@ -0,0 +1,85 @@ +Dir.glob("#{File.dirname(__FILE__)}/*.rb") + .select { |f| File.basename(f) != "common.rb" } + .each { |f| load f } + +class Verbosity + + attr_reader :enabled + + def initialize + @enabled = false + end + + def set(value) + @enabled = value ? 
true : false + end +end + +VERBOSITY = Verbosity.new unless defined?(VERBOSITY) + +def err(msg) + puts "" + puts "ERROR" + puts " " + msg + puts "" + exit(1) +end + +def run(cmd) + puts "==> #{cmd}" if VERBOSITY.enabled + if !system(cmd) + err("Command failed: #{cmd}") + end +end + +def assert_installed(cmd, url) + if !system("which %s > /dev/null" % cmd) + err("Please install %s: %s" % [cmd, url]) + end +end + +def assert_sem_installed + assert_installed("sem-info", "https://github.com/mbryzek/schema-evolution-manager") +end + + +def args_from_stdin(opts={}) + arrays = opts.delete(:arrays) || [] + flags = opts.delete(:flags) || [] + flags << ":v" + if !opts.empty? + err("Unexpected options: #{opts.keys}") + end + + args = {} + + i = 0 + while i < ARGV.length + name = ARGV[i].to_s.strip.sub(/^\-\-/, '').to_sym + i+=1 + if name + if flags.include?(name) + args[name] = true + else + value = ARGV[i] + i+=1 + if arrays.include?(name) + args[name] ||= [] + args[name] << value + elsif args[name] + err("Argument '#{name}' specified more than once") + elsif value.start_with?("--") + err("Argument '#{name}' missing value") + else + args[name] = value + end + end + end + end + + if args[:v] + VERBOSITY.set(args[:v]) + end + + args +end diff --git a/script/lib/tag.rb b/script/lib/tag.rb new file mode 100644 index 000000000..d342f9eec --- /dev/null +++ b/script/lib/tag.rb @@ -0,0 +1,30 @@ +class Tag + def Tag.ask + assert_sem_installed + + next_standard_tag = `sem-info tag next`.strip + next_tag = replace_hundreds(next_standard_tag) + puts "" + if Ask.for_boolean("Create new tag #{next_tag}?") + run("git tag -a -m #{next_tag} #{next_tag}") + run("git push --tags origin") + end + + `sem-info tag latest`.strip + end + + def Tag.replace_hundreds(tag) + parts = tag.split('.', 3) + if parts.length == 3 && parts.all? { |p| p.to_i.to_s == p } + if parts[2].to_i >= 100 + Tag.replace_hundreds("%s.%s.%s" % [parts[0], parts[1].to_i + 1, 0]) + elsif parts[1].to_i >= 100 + Tag.replace_hundreds("%s.%s.%s" % [parts[0] + 1, 0, 0]) + else + tag + end + else + tag + end + end +end diff --git a/script/upload b/script/update similarity index 67% rename from script/upload rename to script/update index 34305743f..8c76a0627 100755 --- a/script/upload +++ b/script/update @@ -12,7 +12,7 @@ # # Upload specific specifications # -# upload --spec organization --spec user +# upload --app organization --app user # # Upload a specific tag # @@ -30,31 +30,22 @@ # # upload --dir examples # +# Skip apibuilder update +# +# upload --no_download +# + +load File.join(File.dirname(__FILE__), "lib/common.rb") require 'pathname' require 'json' ORGANIZATION = "apicollective" -args = {} -ARGV.each_slice(2) { |pair| - key = pair[0].to_s.sub(/^\-\-/, '').to_sym - value = pair[1].to_s.strip - if args[key] - args[key] << value - elsif key == :spec - args[key] = [value] - else - args[key] = value - end -} +args = args_from_stdin(:arrays => [:app], :flags => [:no_download]) -def assert_installed(cmd, url) - if !system("which %s > /dev/null" % cmd) - puts "** ERROR: Please install %s: %s" % [cmd, url] - exit(1) - end -end +assert_installed("apibuilder", "https://github.com/apicollective/apibuilder-cli") +assert_installed("sem-info", "https://github.com/mbryzek/schema-evolution-manager") def calculate_next_tag assert_installed("sem-info", "https://github.com/mbryzek/schema-evolution-manager") @@ -66,16 +57,10 @@ end # # @param remaining List of Spec instances def resolve_dependencies(remaining, ordered=[]) - if remaining.empty? 
- ordered - elsif next_spec = remaining.find { |spec| resolved?(ordered, spec) } + if next_spec = remaining.find { |spec| resolved?(ordered, spec) } resolve_dependencies(remaining - [next_spec], ordered + [next_spec]) else - puts "** ERROR: Could not resolve dependencies. Remaining specifications are:" - remaining.each do |spec| - puts " - %s/%s" % [spec.organization, spec.application] - end - exit(1) + ordered + remaining end end @@ -92,11 +77,8 @@ def resolved?(specs, spec) } end -assert_installed("apibuilder", "https://github.com/apicollective/apibuilder-cli") - tag = args[:tag] || calculate_next_tag -dir = args[:dir] || 'spec' -spec_dir = Pathname.new(File.join(File.dirname(__FILE__), "/../#{dir}")).cleanpath +spec_dir = args[:dir] || Pathname.new(File.join(File.dirname(__FILE__), "/../spec")).cleanpath class Dependency @@ -112,7 +94,7 @@ class Dependency elsif md = uri.match(/^https?:\/\/www.apibuilder.io\/([^\/]+)\/([^\/]+)/) Dependency.new(md[1], md[2]) else - raise "Could not parse import uri[%s]" % uri + err("Could not parse import uri[%s]" % uri) end end @@ -141,7 +123,7 @@ class Spec (json['imports'] || []).map { |imp| Dependency.from_uri(imp['uri']) } end - def command(tag, profile=nil) + def upload_command(tag, profile=nil) cmds = [] if profile cmds << "PROFILE=%s" % profile @@ -149,7 +131,16 @@ class Spec cmds << "apibuilder upload %s %s %s --version %s" % [ORGANIZATION, @application, @path, tag] cmds.join(" ") end - + + def download_command(profile=nil) + cmds = [] + if profile + cmds << "PROFILE=%s" % profile + end + cmds << "apibuilder update --app %s" % @application + cmds.join(" ") + end + end specs = Dir.glob("#{spec_dir}/*.json").map do |path| @@ -158,31 +149,27 @@ specs = Dir.glob("#{spec_dir}/*.json").map do |path| end ordered = resolve_dependencies(specs.sort_by { |s| [s.organization, s.application] }) +filtered = args[:app].nil? ? ordered : ordered.select { |spec| args[:app].include?(spec.application) } -filtered = args[:spec].nil? ? 
ordered : ordered.select { |spec| args[:spec].include?(spec.application) } - -if args[:spec] && filtered.size != args[:spec].size - missing = args[:spec].select { |n| !filtered.map(&:application).include?(n) } +if args[:app] && filtered.size != args[:app].size + missing = args[:app].select { |n| !filtered.map(&:application).include?(n) } if missing.size == 1 - puts "** ERROR: Did not find spec: %s" % missing.join(", ") + msg = "** ERROR: Did not find spec: %s\n" % missing.join(", ") else - puts "** ERROR: Did not find specs: %s" % missing.join(", ") + msg = "** ERROR: Did not find specs: %s\n" % missing.join(", ") end - puts " Available specs: " - puts " " + specs.map(&:application).join("\n ") - puts "" - exit(1) + msg << " Available specs:\n" + msg << " " + specs.map(&:application).join("\n ") + err(msg) end filtered.each do |spec| - command = spec.command(tag, args[:profile]) - puts command - if !system(command) - puts "" - puts "** ERROR: Exiting as last command failed" - exit(1) - end - puts "" + run(spec.upload_command(tag, args[:profile])) end +if !args[:no_download] + filtered.each do |spec| + run(spec.download_command(args[:profile])) + end +end diff --git a/script/update_daos b/script/update_daos new file mode 100755 index 000000000..d5061f076 --- /dev/null +++ b/script/update_daos @@ -0,0 +1,65 @@ +#!/usr/bin/env ruby + +require 'pathname' + +load File.join(File.dirname(__FILE__), "lib/common.rb") + +args = args_from_stdin(:arrays => [:app], :flags => [:no_download]) + +ORGANIZATION = "apicollective" + +def path_from_root(path) + Pathname.new(File.join(File.dirname(__FILE__), "../#{path}")).cleanpath.to_s +end + +UPDATE_SCRIPT = path_from_root("script/update") +DAO_SPEC_DIR = path_from_root("dao/spec") + +def upload(args, apps) + cmd = "#{UPDATE_SCRIPT} --dir #{DAO_SPEC_DIR} --no_download" + if profile = args[:profile] + cmd << " --profile #{profile}" + end + apps.each do |app| + cmd << " --app #{app}" + end + + run cmd +end + +def download(apps, profile=nil) + apps.each do |app| + cmd = "apibuilder code #{ORGANIZATION} #{app} latest psql_scala ./generated/app/db" + if profile + cmd = "PROFILE=#{profile} #{cmd}" + end + puts cmd + run cmd + end +end + +apps = Dir.glob("#{DAO_SPEC_DIR}/*.json").map do |path| + File.basename(path).sub(/\.json$/, '') +end + +if apps.empty? + err("Did not find any .json files in #{DAO_SPEC_DIR}") +end + +filtered = if args[:app] + args[:app].each do |name| + a = apps.find { |a| a == name } + if a.nil? + err("Application '#{name}' not found in #{DAO_SPEC_DIR}") + end + end + args[:app].uniq + else + apps + end + +upload(args, filtered) + +if !args[:no_download] + download(filtered, args[:profile]) +end
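
For reference, below is a minimal usage sketch of the generated DAOs added in this patch, assuming Play runtime injection. The wrapper class GeneratedDaoExamples, the actor/sessionId parameters, the 30-day expiry, and the "tsk-" id prefix are illustrative assumptions; only the SessionsDao, TasksDao, and form signatures come from the generated code above. The tasks example mirrors the tuple-keyed (type_id, type) lookup exposed by findByTypeIdAndType and the on conflict(type_id, type) upsert.

    import java.util.UUID
    import javax.inject.Inject
    import org.joda.time.DateTime
    import play.api.libs.json.Json

    import db.generated.{SessionForm, SessionsDao, TaskForm, TasksDao}

    // Hypothetical wrapper around the generated DAOs; not part of the patch itself.
    class GeneratedDaoExamples @Inject() (
      sessionsDao: SessionsDao,
      tasksDao: TasksDao
    ) {

      // Sessions: the caller supplies the id; created_*/updated_* columns are
      // stamped by the DAO, and delete is a soft delete (deleted_at/deleted_by_guid).
      def createSession(actor: UUID, sessionId: String): Unit = {
        sessionsDao.insert(
          user = actor,
          form = SessionForm(
            id = sessionId,
            userGuid = actor,
            expiresAt = DateTime.now.plusDays(30) // illustrative expiry
          )
        )
      }

      def expireSession(actor: UUID, sessionId: String): Unit = {
        sessionsDao.findById(sessionId).foreach { session =>
          sessionsDao.delete(actor, session) // sets deleted_at / deleted_by_guid
        }
      }

      // Tasks: keyed on the (type_id, type) pair, matching the generated
      // upsert's on-conflict clause and the tuple-based finder.
      def scheduleTask(actor: UUID, typ: String, typeId: String): Unit = {
        if (tasksDao.findByTypeIdAndType((typeId, typ)).isEmpty) {
          tasksDao.upsertByTypeIdAndType(
            user = actor,
            form = TaskForm(
              id = s"tsk-$typeId", // illustrative id scheme
              `type` = typ,
              typeId = typeId,
              organizationGuid = None,
              numAttempts = 0,
              nextAttemptAt = DateTime.now,
              errors = None,
              stacktrace = None,
              data = Json.obj()
            )
          )
        }
      }
    }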