diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..d30f881 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/" + schedule: + interval: "daily" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..b5eb1f7 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,18 @@ +name: make +on: + push: + branches: + - main + pull_request: +jobs: + build: + name: test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v4 + with: + go-version: '1.19' + - run: wget https://github.com/tinygo-org/tinygo/releases/download/v0.26.0/tinygo_0.26.0_amd64.deb + - run: sudo dpkg -i tinygo_0.26.0_amd64.deb + - run: make diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0f76b52 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +*.wasm +.vscode diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..60be673 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 kaashyapan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..2c414a7 --- /dev/null +++ b/Makefile @@ -0,0 +1,9 @@ +all: sqlc-gen-fsharp sqlc-gen-fsharp.wasm + +sqlc-gen-fsharp: + cd plugin && go build -o ~/bin/sqlc-gen-fsharp ./main.go + +sqlc-gen-fsharp.wasm: + cd plugin && tinygo build -o sqlc-gen-fsharp.wasm -gc=leaking -scheduler=none -wasm-abi=generic -target=wasi main.go + openssl sha256 plugin/sqlc-gen-fsharp.wasm + diff --git a/Readme.md b/Readme.md new file mode 100644 index 0000000..4dde63c --- /dev/null +++ b/Readme.md @@ -0,0 +1,99 @@ +# Sqlc plugin for F# +## Codegen F# from SQL +`sqlc` is a command line program that generates type-safe database access code from SQL. +Sqlc documentation - https://sqlc.dev + +**Inputs** + - DB schema.sql file + - File containing SQL statements + - Configuration file. + +**Outputs** + - Models as F# data structures + - Queries as functions taking named-typed parameters + - Readers to decode DB response into F# data structures + + +| Target | Library | | +|-----------|-------------------|----| +|Postgres |`Npgsql.FSharp` | | +|MySql | Not supported | Models will be generated| +|Sqlite |`Fumble` | | + +## Why this ? 
+Type safe DB access in F# is tedious with manually written data structures.\
+SqlHydra is a great dotnet tool to generate F# boilerplate, and it works great with ORMs.\
+I found I was writing a lot of custom SQL and wanted a solution that can generate 100% of the code.
+
+This is intended for devs who prefer to write SQL by hand.
+
+|SqlHydra | Sqlc|
+|-----------|-------------------|
+|Uses a connection to the database to generate data structures| Uses schema file and SQL files|
+|Postgres, Oracle, MSSql & Sqlite | Postgres & Sqlite |
+|SqlHydra.Query uses Sqlkata | Handwritten Sql |
+|Wraps Microsoft.Data.SqlClient. Flexible. Bring your own ADO.NET wrapper| Wraps higher-level F# libraries. Opinionated. Less generated code. |
+|Cannot introspect queries | Wraps the pg_query Postgres SQL parser. It syntax-checks the SQL & DDL statements|
+|Handwritten data structures are required for custom queries| Produces exact data structures and readers for custom queries |
+
+
+## How to use
+
+- Install [Sqlc](https://docs.sqlc.dev/en/latest/overview/install.html)
+- Create schema.sql containing DDL statements (or generate it using pg_dump)
+- Create query.sql containing SQL statements, each with a name annotation as described in the [docs](https://docs.sqlc.dev/en/latest/reference/query-annotations.html)
+  ```sql
+  -- name: ListAuthors :many
+  SELECT * FROM authors ORDER BY name;
+  ```
+- Create sqlc.json & configure the options
+  ```json
+  {
+    "version": "2",
+    "plugins": [
+      {
+        "name": "fsharp",
+        "process": {
+          "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp"
+        }
+      }
+    ],
+    "sql": [
+      {
+        "engine": "postgresql",
+        "schema": "schema.sql",
+        "queries": "query.sql",
+        "codegen": [
+          {
+            "out": <..target_folder...>,
+            "plugin": "fsharp",
+            "options": {
+              "namespace": <...Namespace...>,
+              "async": false,
+              "type_affinity": true
+            }
+          }
+        ]
+      }
+    ]
+  }
+  ```
+- Run ```sqlc generate```
+
+See the test folder for a sample setup.
+
+
+
+### fsharp config options
+`namespace`: The namespace to use for the generated code.\
+`out`: Output directory for generated code.\
+`emit_exact_table_names`: If true, use the exact table name for generated models. Otherwise, guess a singular form. Defaults to *false*.\
+`async`: If true, all generated query functions will be async. Defaults to *false*.\
+`type_affinity`: If true, all DB integers (except Bigint) will be mapped to F# `int` and all DB floats to F# `double`. Defaults to *false*.
+
+
+### TODO
+- Support for enumerated column types.
+- PostGIS type support
+- Optionally generate classes instead of records
+- Autogenerate basic CRUD without writing SQL
\ No newline at end of file
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000..ab03a9e
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+#
+docker run -it --rm -w /src -v ~/sqlc-gen-fsharp:/src tinygo/tinygo:0.27.0 tinygo build -o sqlc-gen-fsharp.wasm -target wasi plugin/main.go
diff --git a/examples/authors/mysql/Models.fs b/examples/authors/mysql/Models.fs
new file mode 100644
index 0000000..85122f3
--- /dev/null
+++ b/examples/authors/mysql/Models.fs
@@ -0,0 +1,11 @@
+// Code generated by sqlc. DO NOT EDIT.
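+// For the MySQL engine only models are generated at this point: the matching Queries.fs and
+// Readers.fs in this example are empty files, in line with the support matrix in the README.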
+// version: sqlc v1.18.0 + +namespace Authors + +open System + +type Author = + { Id: int64 + Name: string + Bio: string option } diff --git a/examples/authors/mysql/Queries.fs b/examples/authors/mysql/Queries.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/authors/mysql/Readers.fs b/examples/authors/mysql/Readers.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/authors/mysql/query.sql b/examples/authors/mysql/query.sql new file mode 100644 index 0000000..54d329f --- /dev/null +++ b/examples/authors/mysql/query.sql @@ -0,0 +1,18 @@ +/* name: GetAuthor :one */ +SELECT * FROM authors +WHERE id = @id LIMIT 1; + +/* name: ListAuthors :many */ +SELECT * FROM authors +ORDER BY name; + +/* name: CreateAuthor :execresult */ +INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +); + +/* name: DeleteAuthor :exec */ +DELETE FROM authors +WHERE id = @id; diff --git a/examples/authors/mysql/schema.sql b/examples/authors/mysql/schema.sql new file mode 100644 index 0000000..581ecfe --- /dev/null +++ b/examples/authors/mysql/schema.sql @@ -0,0 +1,5 @@ +CREATE TABLE authors ( + id BIGINT PRIMARY KEY AUTO_INCREMENT, + name text NOT NULL, + bio text +); diff --git a/examples/authors/postgres/Models.fs b/examples/authors/postgres/Models.fs new file mode 100644 index 0000000..ff6dc3f --- /dev/null +++ b/examples/authors/postgres/Models.fs @@ -0,0 +1,12 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Authors + +open System +open Npgsql + +type Author = + { Id: int64 + Name: string + Bio: string option } diff --git a/examples/authors/postgres/Queries.fs b/examples/authors/postgres/Queries.fs new file mode 100644 index 0000000..0c9018f --- /dev/null +++ b/examples/authors/postgres/Queries.fs @@ -0,0 +1,85 @@ +// Code generated by sqlc. DO NOT EDIT. 
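+// Shape of a generated Queries.fs: each named query's SQL is emitted as a constant in the Sqls
+// module below, and the DB type exposes one member per query that binds typed parameters and
+// runs the statement against the connection string via Npgsql.FSharp.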
+// version: sqlc v1.18.0 + +namespace Authors + +open System +open Npgsql +open Npgsql.FSharp +open Authors.Readers + +module Sqls = + + [] + let createAuthor = + """ + INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +) + """ + + [] + let deleteAuthor = + """ + DELETE FROM authors +WHERE id = @id + """ + + [] + let getAuthor = + """ + SELECT id, name, bio FROM authors +WHERE id = @id LIMIT 1 + """ + + [] + let listAuthors = + """ + SELECT id, name, bio FROM authors +ORDER BY name + """ + +[] +type DB(conn: string) = + + // https://www.connectionstrings.com/npgsql + + /// This SQL will insert a single author into the table + member this.createAuthor(name: string, ?bio: string) = + + let parameters = [ ("name", Sql.text name); ("bio", Sql.textOrNone bio) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + /// This SQL will delete a given author + member this.deleteAuthor(id: int64) = + + let parameters = [ ("id", Sql.int64 id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + /// This SQL will select a single author from the table + member this.getAuthor(id: int64) = + + let parameters = [ ("id", Sql.int64 id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.executeRow authorReader + + /// This SQL will list all authors from the authors table + member this.listAuthors() = + + conn |> Sql.connect |> Sql.query Sqls.listAuthors |> Sql.execute authorReader diff --git a/examples/authors/postgres/Readers.fs b/examples/authors/postgres/Readers.fs new file mode 100644 index 0000000..c28086b --- /dev/null +++ b/examples/authors/postgres/Readers.fs @@ -0,0 +1,15 @@ +// Code generated by sqlc. DO NOT EDIT. 
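+// Readers map RowReader columns back to the records defined in Models.fs; one reader is emitted
+// per result shape and reused by every query that returns that shape.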
+// version: sqlc v1.18.0 + +namespace Authors + +open System +open Npgsql +open Npgsql.FSharp + +module Readers = + + let authorReader (r: RowReader) : Author = + { Author.Id = r.int64 "id" + Name = r.text "name" + Bio = r.textOrNone "bio" } diff --git a/examples/authors/postgresql/query.sql b/examples/authors/postgresql/query.sql new file mode 100644 index 0000000..a1d5cf9 --- /dev/null +++ b/examples/authors/postgresql/query.sql @@ -0,0 +1,22 @@ +/* name: GetAuthor :one */ +/* This SQL will select a single author from the table */ +SELECT * FROM authors +WHERE id = @id LIMIT 1; + +/* name: ListAuthors :many */ +/* This SQL will list all authors from the authors table */ +SELECT * FROM authors +ORDER BY name; + +/* name: CreateAuthor :execresult */ +/* This SQL will insert a single author into the table */ +INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +); + +/* name: DeleteAuthor :exec */ +/* This SQL will delete a given author */ +DELETE FROM authors +WHERE id = @id; diff --git a/examples/authors/postgresql/schema.sql b/examples/authors/postgresql/schema.sql new file mode 100644 index 0000000..b4fad78 --- /dev/null +++ b/examples/authors/postgresql/schema.sql @@ -0,0 +1,5 @@ +CREATE TABLE authors ( + id BIGSERIAL PRIMARY KEY, + name text NOT NULL, + bio text +); diff --git a/examples/authors/sqlc.json b/examples/authors/sqlc.json new file mode 100644 index 0000000..6ac2fc6 --- /dev/null +++ b/examples/authors/sqlc.json @@ -0,0 +1,61 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "schema": "postgresql/schema.sql", + "queries": "postgresql/query.sql", + "engine": "postgresql", + "codegen": [ + { + "out": "postgres", + "plugin": "fsharp", + "options": { + "namespace": "Authors", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "mysql/schema.sql", + "queries": "mysql/query.sql", + "engine": "mysql", + "codegen": [ + { + "out": "mysql", + "plugin": "fsharp", + "options": { + "namespace": "Authors", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "sqlite/schema.sql", + "queries": "sqlite/query.sql", + "engine": "sqlite", + "codegen": [ + { + "out": "sqlite", + "plugin": "fsharp", + "options": { + "namespace": "Authors", + "async": false, + "type_affinity": true + } + } + ] + } + ] +} \ No newline at end of file diff --git a/examples/authors/sqlite/Models.fs b/examples/authors/sqlite/Models.fs new file mode 100644 index 0000000..2428bad --- /dev/null +++ b/examples/authors/sqlite/Models.fs @@ -0,0 +1,12 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Authors + +open System +open Fumble + +type Author = + { Id: int + Name: string + Bio: string option } diff --git a/examples/authors/sqlite/Queries.fs b/examples/authors/sqlite/Queries.fs new file mode 100644 index 0000000..8310c70 --- /dev/null +++ b/examples/authors/sqlite/Queries.fs @@ -0,0 +1,83 @@ +// Code generated by sqlc. DO NOT EDIT. 
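+// The SQLite output targets Fumble: the `module Sql = Sqlite` / `type Sql = Sqlite` aliases below
+// let the generated members keep the same shape as the Npgsql.FSharp output for Postgres.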
+// version: sqlc v1.18.0 + +namespace Authors + +open System +open Fumble +open Authors.Readers + +module Sql = Sqlite +type Sql = Sqlite + +module Sqls = + + [] + let createAuthor = + """ + INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +) + """ + + [] + let deleteAuthor = + """ + DELETE FROM authors +WHERE id = @id + """ + + [] + let getAuthor = + """ + SELECT id, name, bio FROM authors +WHERE id = @id LIMIT 1 + """ + + [] + let listAuthors = + """ + SELECT id, name, bio FROM authors +ORDER BY name + """ + +[] +type DB(conn: string) = + + // https://www.connectionstrings.com/sqlite-net-provider + + member this.createAuthor(name: string, ?bio: string) = + + let parameters = [ ("name", Sql.string name); ("bio", Sql.stringOrNone bio) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.deleteAuthor(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getAuthor(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.execute authorReader + + member this.listAuthors() = + + conn |> Sql.connect |> Sql.query Sqls.listAuthors |> Sql.execute authorReader diff --git a/examples/authors/sqlite/Readers.fs b/examples/authors/sqlite/Readers.fs new file mode 100644 index 0000000..203806c --- /dev/null +++ b/examples/authors/sqlite/Readers.fs @@ -0,0 +1,16 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Authors + +open System +open Fumble + +type RowReader = SqliteRowReader + +module Readers = + + let authorReader (r: RowReader) : Author = + { Author.Id = r.int "id" + Name = r.string "name" + Bio = r.stringOrNone "bio" } diff --git a/examples/authors/sqlite/query.sql b/examples/authors/sqlite/query.sql new file mode 100644 index 0000000..54d329f --- /dev/null +++ b/examples/authors/sqlite/query.sql @@ -0,0 +1,18 @@ +/* name: GetAuthor :one */ +SELECT * FROM authors +WHERE id = @id LIMIT 1; + +/* name: ListAuthors :many */ +SELECT * FROM authors +ORDER BY name; + +/* name: CreateAuthor :execresult */ +INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +); + +/* name: DeleteAuthor :exec */ +DELETE FROM authors +WHERE id = @id; diff --git a/examples/authors/sqlite/schema.sql b/examples/authors/sqlite/schema.sql new file mode 100644 index 0000000..9b81ece --- /dev/null +++ b/examples/authors/sqlite/schema.sql @@ -0,0 +1,5 @@ +CREATE TABLE authors ( + id integer PRIMARY KEY AUTOINCREMENT, + name text NOT NULL, + bio text +); diff --git a/examples/booktest/mysql/Models.fs b/examples/booktest/mysql/Models.fs new file mode 100644 index 0000000..8cfb368 --- /dev/null +++ b/examples/booktest/mysql/Models.fs @@ -0,0 +1,25 @@ +// Code generated by sqlc. DO NOT EDIT. 
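+// NOTE: BookType below references books_book_type, the ENUM declared in the MySQL schema.sql;
+// enumerated column types are not generated yet (see the README TODO), so no matching F# type
+// is emitted in this file.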
+// version: sqlc v1.18.0 + +namespace Booktest + +open System + +type Author = { AuthorId: int; Name: string } + +type Book = + { BookId: int + AuthorId: int + Isbn: string + BookType: books_book_type + Title: string + Yr: int + Available: DateTime + Tags: string } + +type BooksByTagsRow = + { BookId: int + Title: string + Name: string option + Isbn: string + Tags: string } diff --git a/examples/booktest/mysql/Queries.fs b/examples/booktest/mysql/Queries.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/booktest/mysql/Readers.fs b/examples/booktest/mysql/Readers.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/booktest/mysql/query.sql b/examples/booktest/mysql/query.sql new file mode 100644 index 0000000..a455ad4 --- /dev/null +++ b/examples/booktest/mysql/query.sql @@ -0,0 +1,63 @@ +/* name: GetAuthor :one */ +SELECT * FROM authors +WHERE author_id = ?; + +/* name: GetBook :one */ +SELECT * FROM books +WHERE book_id = ?; + +/* name: DeleteBook :exec */ +DELETE FROM books +WHERE book_id = ?; + +/* name: BooksByTitleYear :many */ +SELECT * FROM books +WHERE title = ? AND yr = ?; + +/* name: BooksByTags :many */ +SELECT + book_id, + title, + name, + isbn, + tags +FROM books +LEFT JOIN authors ON books.author_id = authors.author_id +WHERE tags = ?; + +/* name: CreateAuthor :execresult */ +INSERT INTO authors (name) VALUES (?); + +/* name: CreateBook :execresult */ +INSERT INTO books ( + author_id, + isbn, + book_type, + title, + yr, + available, + tags +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +/* name: UpdateBook :exec */ +UPDATE books +SET title = ?, tags = ? +WHERE book_id = ?; + +/* name: UpdateBookISBN :exec */ +UPDATE books +SET title = ?, tags = ?, isbn = ? +WHERE book_id = ?; + +/* name: DeleteAuthorBeforeYear :exec */ +DELETE FROM books +WHERE yr < ? AND author_id = ?; +-- WHERE yr < sqlc.arg(min_publish_year) AND author_id = ?; diff --git a/examples/booktest/mysql/schema.sql b/examples/booktest/mysql/schema.sql new file mode 100644 index 0000000..e457da9 --- /dev/null +++ b/examples/booktest/mysql/schema.sql @@ -0,0 +1,26 @@ +CREATE TABLE authors ( + author_id integer NOT NULL AUTO_INCREMENT PRIMARY KEY, + name text NOT NULL +) ENGINE=InnoDB; + +CREATE INDEX authors_name_idx ON authors(name(255)); + +CREATE TABLE books ( + book_id integer NOT NULL AUTO_INCREMENT PRIMARY KEY, + author_id integer NOT NULL, + isbn varchar(255) NOT NULL DEFAULT '' UNIQUE, + book_type ENUM('FICTION', 'NONFICTION') NOT NULL DEFAULT 'FICTION', + title text NOT NULL, + yr integer NOT NULL DEFAULT 2000, + available datetime NOT NULL DEFAULT NOW(), + tags text NOT NULL + -- CONSTRAINT FOREIGN KEY (author_id) REFERENCES authors(author_id) +) ENGINE=InnoDB; + +CREATE INDEX books_title_idx ON books(title(255), yr); + +/* +CREATE FUNCTION say_hello(s text) RETURNS text + DETERMINISTIC + RETURN CONCAT('hello ', s); +*/ diff --git a/examples/booktest/postgres/Models.fs b/examples/booktest/postgres/Models.fs new file mode 100644 index 0000000..a5fdf0c --- /dev/null +++ b/examples/booktest/postgres/Models.fs @@ -0,0 +1,36 @@ + +// Code generated by sqlc. DO NOT EDIT. 
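+// NOTE: book_type is the Postgres ENUM from schema.sql; until enum support lands (README TODO),
+// the corresponding Queries.fs and Readers.fs fall back to unhandled_report_issue placeholders
+// for this column.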
+// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Npgsql + +type Author = + { + AuthorId : int + Name : string + } + +type Book = + { + BookId : int + AuthorId : int + Isbn : string + BookType : book_type + Title : string + Year : int + Available : DateTimeOffset + Tags : string + } + +type BooksByTagsRow = + { + BookId : int + Title : string + Name : string option + Isbn : string + Tags : List<string> + } + diff --git a/examples/booktest/postgres/Queries.fs b/examples/booktest/postgres/Queries.fs new file mode 100644 index 0000000..0082ab0 --- /dev/null +++ b/examples/booktest/postgres/Queries.fs @@ -0,0 +1,195 @@ + +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Npgsql +open Npgsql.FSharp +open Booktest.Readers + +module Sqls = + + [] + let booksByTags = + """ + SELECT + book_id, + title, + name, + isbn, + tags +FROM books +LEFT JOIN authors ON books.author_id = authors.author_id +WHERE tags && @::varchar[] + """ + + [] + let booksByTitleYear = + """ + SELECT book_id, author_id, isbn, book_type, title, year, available, tags FROM books +WHERE title = @title AND year = @year + """ + + [] + let createAuthor = + """ + INSERT INTO authors (name) VALUES (@name) +RETURNING author_id, name + """ + + [] + let createBook = + """ + INSERT INTO books ( + author_id, + isbn, + book_type, + title, + year, + available, + tags +) VALUES ( + @author_id, + @isbn, + @book_type, + @title, + @year, + @available, + @tags +) +RETURNING book_id, author_id, isbn, book_type, title, year, available, tags + """ + + [] + let deleteBook = + """ + DELETE FROM books +WHERE book_id = @book_id + """ + + [] + let getAuthor = + """ + SELECT author_id, name FROM authors +WHERE author_id = @author_id + """ + + [] + let getBook = + """ + SELECT book_id, author_id, isbn, book_type, title, year, available, tags FROM books +WHERE book_id = @book_id + """ + + [] + let updateBook = + """ + UPDATE books +SET title = @title, tags = @tags +WHERE book_id = @book_id + """ + + [] + let updateBookISBN = + """ + UPDATE books +SET title = @title, tags = @tags, isbn = @isbn +WHERE book_id = @book_id + """ + +[] +type DB (conn: string) = + // https://www.connectionstrings.com/npgsql + + member this.booksByTags (dollar1: List<string>) = + + let parameters = [ ("", Sql.string dollar1) ] + + conn + |> Sql.connect + |> Sql.query Sqls.booksByTags + |> Sql.parameters parameters + |> Sql.execute booksByTagsRowReader + + member this.booksByTitleYear (title: string, year: int) = + + let parameters = [ ("title", Sql.text title); ("year", Sql.int year) ] + + conn + |> Sql.connect + |> Sql.query Sqls.booksByTitleYear + |> Sql.parameters parameters + |> Sql.execute bookReader + + member this.createAuthor (name: string) = + + let parameters = [ ("name", Sql.text name) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.executeRow authorReader + + member this.createBook (authorId: int, isbn: string, bookType: book_type, title: string, year: int, available: DateTimeOffset, tags: List<string>) = + + let parameters = [ ("author_id", Sql.int authorId); ("isbn", Sql.text isbn); ("book_type", Sql.unhandled_report_issue bookType); ("title", Sql.text title); ("year", Sql.int year); ("available", Sql.timestamptz available); ("tags", Sql.string tags) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createBook + |> Sql.parameters parameters + |> Sql.executeRow bookReader + + member this.deleteBook (bookId: int) = + + let parameters = [ 
("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteBook + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getAuthor (authorId: int) = + + let parameters = [ ("author_id", Sql.int authorId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.executeRow authorReader + + member this.getBook (bookId: int) = + + let parameters = [ ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getBook + |> Sql.parameters parameters + |> Sql.executeRow bookReader + + member this.updateBook (title: string, tags: List<string>, bookId: int) = + + let parameters = [ ("title", Sql.text title); ("tags", Sql.string tags); ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateBook + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.updateBookISBN (title: string, tags: List<string>, bookId: int, isbn: string) = + + let parameters = [ ("title", Sql.text title); ("tags", Sql.string tags); ("book_id", Sql.int bookId); ("isbn", Sql.text isbn) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateBookISBN + |> Sql.parameters parameters + |> Sql.executeNonQuery + diff --git a/examples/booktest/postgres/Readers.fs b/examples/booktest/postgres/Readers.fs new file mode 100644 index 0000000..268b0d7 --- /dev/null +++ b/examples/booktest/postgres/Readers.fs @@ -0,0 +1,31 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Npgsql +open Npgsql.FSharp + +module Readers = + + let bookReader (r: RowReader) : Book = + { Book.BookId = r.int "book_id" + AuthorId = r.int "author_id" + Isbn = r.text "isbn" + BookType = r.unhandled_report_issue "book_type" + Title = r.text "title" + Year = r.int "year" + Available = r.datetimeOffset "available" + Tags = r.string "tags" } + + let authorReader (r: RowReader) : Author = + { Author.AuthorId = r.int "author_id" + Name = r.text "name" } + + let booksByTagsRowReader (r: RowReader) : BooksByTagsRow = + { BooksByTagsRow.BookId = r.int "book_id" + Title = r.text "title" + Name = r.textOrNone "name" + Isbn = r.text "isbn" + Tags = r.string "tags" } diff --git a/examples/booktest/postgresql/query.sql b/examples/booktest/postgresql/query.sql new file mode 100644 index 0000000..194897a --- /dev/null +++ b/examples/booktest/postgresql/query.sql @@ -0,0 +1,60 @@ +-- name: GetAuthor :one +SELECT * FROM authors +WHERE author_id = $1; + +-- name: GetBook :one +SELECT * FROM books +WHERE book_id = $1; + +-- name: DeleteBook :exec +DELETE FROM books +WHERE book_id = $1; + +-- name: BooksByTitleYear :many +SELECT * FROM books +WHERE title = $1 AND year = $2; + +-- name: BooksByTags :many +SELECT + book_id, + title, + name, + isbn, + tags +FROM books +LEFT JOIN authors ON books.author_id = authors.author_id +WHERE tags && $1::varchar[]; + +-- name: CreateAuthor :one +INSERT INTO authors (name) VALUES ($1) +RETURNING *; + +-- name: CreateBook :one +INSERT INTO books ( + author_id, + isbn, + book_type, + title, + year, + available, + tags +) VALUES ( + $1, + $2, + $3, + $4, + $5, + $6, + $7 +) +RETURNING *; + +-- name: UpdateBook :exec +UPDATE books +SET title = $1, tags = $2 +WHERE book_id = $3; + +-- name: UpdateBookISBN :exec +UPDATE books +SET title = $1, tags = $2, isbn = $4 +WHERE book_id = $3; diff --git a/examples/booktest/postgresql/schema.sql b/examples/booktest/postgresql/schema.sql new file mode 100644 index 0000000..2beecab --- /dev/null +++ 
b/examples/booktest/postgresql/schema.sql @@ -0,0 +1,32 @@ +CREATE TABLE authors ( + author_id SERIAL PRIMARY KEY, + name text NOT NULL DEFAULT '' +); + +CREATE INDEX authors_name_idx ON authors(name); + +CREATE TYPE book_type AS ENUM ( + 'FICTION', + 'NONFICTION' +); + +CREATE TABLE books ( + book_id SERIAL PRIMARY KEY, + author_id integer NOT NULL REFERENCES authors(author_id), + isbn text NOT NULL DEFAULT '' UNIQUE, + book_type book_type NOT NULL DEFAULT 'FICTION', + title text NOT NULL DEFAULT '', + year integer NOT NULL DEFAULT 2000, + available timestamp with time zone NOT NULL DEFAULT 'NOW()', + tags varchar[] NOT NULL DEFAULT '{}' +); + +CREATE INDEX books_title_idx ON books(title, year); + +CREATE FUNCTION say_hello(text) RETURNS text AS $$ +BEGIN + RETURN CONCAT('hello ', $1); +END; +$$ LANGUAGE plpgsql; + +CREATE INDEX books_title_lower_idx ON books(title); diff --git a/examples/booktest/sqlc.json b/examples/booktest/sqlc.json new file mode 100644 index 0000000..beb4703 --- /dev/null +++ b/examples/booktest/sqlc.json @@ -0,0 +1,61 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "schema": "postgresql/schema.sql", + "queries": "postgresql/query.sql", + "engine": "postgresql", + "codegen": [ + { + "out": "postgres", + "plugin": "fsharp", + "options": { + "namespace": "Booktest", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "mysql/schema.sql", + "queries": "mysql/query.sql", + "engine": "mysql", + "codegen": [ + { + "out": "mysql", + "plugin": "fsharp", + "options": { + "namespace": "Booktest", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "sqlite/schema.sql", + "queries": "sqlite/query.sql", + "engine": "sqlite", + "codegen": [ + { + "out": "sqlite", + "plugin": "fsharp", + "options": { + "namespace": "Booktest", + "async": false, + "type_affinity": true + } + } + ] + } + ] +} \ No newline at end of file diff --git a/examples/booktest/sqlite/Models.fs b/examples/booktest/sqlite/Models.fs new file mode 100644 index 0000000..c010e79 --- /dev/null +++ b/examples/booktest/sqlite/Models.fs @@ -0,0 +1,26 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Fumble + +type Author = { AuthorId: int; Name: string } + +type Book = + { BookId: int + AuthorId: int + Isbn: string + BookType: string + Title: string + Yr: int + Available: DateTime + Tags: string } + +type BooksByTagsRow = + { BookId: int + Title: string + Name: string + Isbn: string + Tags: string } diff --git a/examples/booktest/sqlite/Queries.fs b/examples/booktest/sqlite/Queries.fs new file mode 100644 index 0000000..75955f7 --- /dev/null +++ b/examples/booktest/sqlite/Queries.fs @@ -0,0 +1,233 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Fumble +open Booktest.Readers + +module Sql = Sqlite +type Sql = Sqlite + +module Sqls = + + [] + let booksByTags = + """ + SELECT + book_id, + title, + name, + isbn, + tags +FROM books +LEFT JOIN authors ON books.author_id = authors.author_id +WHERE tags = ? + """ + + [] + let booksByTitleYear = + """ + SELECT book_id, author_id, isbn, book_type, title, yr, available, tags FROM books +WHERE title = ? AND yr = ? + """ + + [] + let createAuthor = + """ + INSERT INTO authors (name) VALUES (?) 
+ """ + + [] + let createBook = + """ + INSERT INTO books ( + author_id, + isbn, + book_type, + title, + yr, + available, + tags +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ? +) + """ + + [] + let deleteAuthorBeforeYear = + """ + DELETE FROM books +WHERE yr < ? AND author_id = ? + """ + + [] + let deleteBook = + """ + DELETE FROM books +WHERE book_id = ? + """ + + [] + let getAuthor = + """ + SELECT author_id, name FROM authors +WHERE author_id = ? + """ + + [] + let getBook = + """ + SELECT book_id, author_id, isbn, book_type, title, yr, available, tags FROM books +WHERE book_id = ? + """ + + [] + let updateBook = + """ + UPDATE books +SET title = ?, tags = ? +WHERE book_id = ? + """ + + [] + let updateBookISBN = + """ + UPDATE books +SET title = ?, tags = ?, isbn = ? +WHERE book_id = ? + """ + +[] +type DB(conn: string) = + // https://www.connectionstrings.com/sqlite-net-provider + + member this.booksByTags(tags: string) = + + let parameters = [ ("tags", Sql.string tags) ] + + conn + |> Sql.connect + |> Sql.query Sqls.booksByTags + |> Sql.parameters parameters + |> Sql.execute booksByTagsRowReader + + member this.booksByTitleYear(title: string, yr: int) = + + let parameters = [ ("title", Sql.string title); ("yr", Sql.int yr) ] + + conn + |> Sql.connect + |> Sql.query Sqls.booksByTitleYear + |> Sql.parameters parameters + |> Sql.execute bookReader + + member this.createAuthor(name: string) = + + let parameters = [ ("name", Sql.string name) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.createBook + ( + authorId: int, + isbn: string, + bookType: string, + title: string, + yr: int, + available: DateTime, + tags: string + ) = + + let parameters = + [ ("author_id", Sql.int authorId) + ("isbn", Sql.string isbn) + ("book_type", Sql.string bookType) + ("title", Sql.string title) + ("yr", Sql.int yr) + ("available", Sql.dateTime available) + ("tags", Sql.string tags) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createBook + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.deleteAuthorBeforeYear(yr: int, authorId: int) = + + let parameters = [ ("yr", Sql.int yr); ("author_id", Sql.int authorId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteAuthorBeforeYear + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.deleteBook(bookId: int) = + + let parameters = [ ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteBook + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getAuthor(authorId: int) = + + let parameters = [ ("author_id", Sql.int authorId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.execute authorReader + + member this.getBook(bookId: int) = + + let parameters = [ ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getBook + |> Sql.parameters parameters + |> Sql.execute bookReader + + member this.updateBook(title: string, tags: string, bookId: int) = + + let parameters = + [ ("title", Sql.string title) + ("tags", Sql.string tags) + ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateBook + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.updateBookISBN(title: string, tags: string, isbn: string, bookId: int) = + + let parameters = + [ ("title", Sql.string title) + ("tags", Sql.string tags) + ("isbn", Sql.string isbn) + ("book_id", Sql.int bookId) ] + + conn + |> Sql.connect + |> 
Sql.query Sqls.updateBookISBN + |> Sql.parameters parameters + |> Sql.executeNonQuery diff --git a/examples/booktest/sqlite/Readers.fs b/examples/booktest/sqlite/Readers.fs new file mode 100644 index 0000000..2a1bec6 --- /dev/null +++ b/examples/booktest/sqlite/Readers.fs @@ -0,0 +1,32 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Booktest + +open System +open Fumble + +type RowReader = SqliteRowReader + +module Readers = + + let bookReader (r: RowReader) : Book = + { Book.BookId = r.int "book_id" + AuthorId = r.int "author_id" + Isbn = r.string "isbn" + BookType = r.string "book_type" + Title = r.string "title" + Yr = r.int "yr" + Available = r.dateTime "available" + Tags = r.string "tags" } + + let authorReader (r: RowReader) : Author = + { Author.AuthorId = r.int "author_id" + Name = r.string "name" } + + let booksByTagsRowReader (r: RowReader) : BooksByTagsRow = + { BooksByTagsRow.BookId = r.int "book_id" + Title = r.string "title" + Name = r.string "name" + Isbn = r.string "isbn" + Tags = r.string "tags" } diff --git a/examples/booktest/sqlite/query.sql b/examples/booktest/sqlite/query.sql new file mode 100644 index 0000000..dd4f304 --- /dev/null +++ b/examples/booktest/sqlite/query.sql @@ -0,0 +1,62 @@ +/* name: GetAuthor :one */ +SELECT * FROM authors +WHERE author_id = ?; + +/* name: GetBook :one */ +SELECT * FROM books +WHERE book_id = ?; + +/* name: DeleteBook :exec */ +DELETE FROM books +WHERE book_id = ?; + +/* name: BooksByTitleYear :many */ +SELECT * FROM books +WHERE title = ? AND yr = ?; + +/* name: BooksByTags :many */ +SELECT + book_id, + title, + name, + isbn, + tags +FROM books +LEFT JOIN authors ON books.author_id = authors.author_id +WHERE tags = ?; + +/* name: CreateAuthor :execresult */ +INSERT INTO authors (name) VALUES (?); + +/* name: CreateBook :execresult */ +INSERT INTO books ( + author_id, + isbn, + book_type, + title, + yr, + available, + tags +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +/* name: UpdateBook :exec */ +UPDATE books +SET title = ?, tags = ? +WHERE book_id = ?; + +/* name: UpdateBookISBN :exec */ +UPDATE books +SET title = ?, tags = ?, isbn = ? +WHERE book_id = ?; + +/* name: DeleteAuthorBeforeYear :exec */ +DELETE FROM books +WHERE yr < ? AND author_id = ?; diff --git a/examples/booktest/sqlite/schema.sql b/examples/booktest/sqlite/schema.sql new file mode 100644 index 0000000..1176dce --- /dev/null +++ b/examples/booktest/sqlite/schema.sql @@ -0,0 +1,20 @@ +CREATE TABLE authors ( + author_id integer NOT NULL PRIMARY KEY AUTOINCREMENT, + name text NOT NULL +); + +CREATE INDEX authors_name_idx ON authors(name); + +CREATE TABLE books ( + book_id integer NOT NULL PRIMARY KEY AUTOINCREMENT, + author_id integer NOT NULL, + isbn varchar(255) NOT NULL DEFAULT '' UNIQUE, + book_type text NOT NULL DEFAULT 'FICTION', + title text NOT NULL, + yr integer NOT NULL DEFAULT 2000, + available datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, + tags text NOT NULL, + CHECK (book_type = 'FICTION' OR book_type = 'NONFICTION') +); + +CREATE INDEX books_title_idx ON books(title, yr); diff --git a/examples/jets/README.md b/examples/jets/README.md new file mode 100644 index 0000000..0f8d995 --- /dev/null +++ b/examples/jets/README.md @@ -0,0 +1,3 @@ +This database schema and query selection is taken from the +[SQLBoiler](https://github.com/volatiletech/sqlboiler#features--examples) +README. 
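+
+For orientation, below is a minimal sketch of how the generated `DB` type for this example might
+be consumed; the connection string is illustrative and not part of the generated code:
+
+```fsharp
+open Jets
+
+// assumed local connection string
+let db = DB("Host=localhost;Database=jets;Username=postgres;Password=postgres")
+
+let pilots = db.listPilots ()   // runs ListPilots and maps rows with pilotReader
+let total  = db.countPilots ()  // runs CountPilots
+db.deletePilot 1 |> ignore      // runs DeletePilot
+```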
diff --git a/examples/jets/postgres/Models.fs b/examples/jets/postgres/Models.fs new file mode 100644 index 0000000..3434620 --- /dev/null +++ b/examples/jets/postgres/Models.fs @@ -0,0 +1,20 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Jets + +open System +open Npgsql + +type Pilot = { Id: int; Name: string } + +type Jet = + { Id: int + PilotId: int + Age: int + Name: string + Color: string } + +type Language = { Id: int; Language: string } + +type PilotLanguage = { PilotId: int; LanguageId: int } diff --git a/examples/jets/postgres/Queries.fs b/examples/jets/postgres/Queries.fs new file mode 100644 index 0000000..cff473b --- /dev/null +++ b/examples/jets/postgres/Queries.fs @@ -0,0 +1,51 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Jets + +open System +open Npgsql +open Npgsql.FSharp +open Jets.Readers + +module Sqls = + + [] + let countPilots = + """ + SELECT COUNT(*) FROM pilots + """ + + [] + let deletePilot = + """ + DELETE FROM pilots WHERE id = @id + """ + + [] + let listPilots = + """ + SELECT id, name FROM pilots LIMIT 5 + """ + +[] +type DB(conn: string) = + // https://www.connectionstrings.com/npgsql + + member this.countPilots() = + + conn |> Sql.connect |> Sql.query Sqls.countPilots |> Sql.executeRow int64Reader + + member this.deletePilot(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deletePilot + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.listPilots() = + + conn |> Sql.connect |> Sql.query Sqls.listPilots |> Sql.execute pilotReader diff --git a/examples/jets/postgres/Readers.fs b/examples/jets/postgres/Readers.fs new file mode 100644 index 0000000..fb51d51 --- /dev/null +++ b/examples/jets/postgres/Readers.fs @@ -0,0 +1,14 @@ +// Code generated by sqlc. DO NOT EDIT. 
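+// NOTE: Queries.fs in this example calls Sql.executeRow int64Reader for countPilots, but only
+// pilotReader is defined below; the scalar int64 reader does not appear in this example's output.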
+// version: sqlc v1.18.0 + +namespace Jets + +open System +open Npgsql +open Npgsql.FSharp + +module Readers = + + let pilotReader (r: RowReader) : Pilot = + { Pilot.Id = r.int "id" + Name = r.text "name" } diff --git a/examples/jets/query-building.sql b/examples/jets/query-building.sql new file mode 100644 index 0000000..ede8952 --- /dev/null +++ b/examples/jets/query-building.sql @@ -0,0 +1,8 @@ +-- name: CountPilots :one +SELECT COUNT(*) FROM pilots; + +-- name: ListPilots :many +SELECT * FROM pilots LIMIT 5; + +-- name: DeletePilot :exec +DELETE FROM pilots WHERE id = $1; diff --git a/examples/jets/schema.sql b/examples/jets/schema.sql new file mode 100644 index 0000000..2cc4aca --- /dev/null +++ b/examples/jets/schema.sql @@ -0,0 +1,35 @@ +CREATE TABLE pilots ( + id integer NOT NULL, + name text NOT NULL +); + +ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id); + +CREATE TABLE jets ( + id integer NOT NULL, + pilot_id integer NOT NULL, + age integer NOT NULL, + name text NOT NULL, + color text NOT NULL +); + +ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id); +ALTER TABLE jets ADD CONSTRAINT jet_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id); + +CREATE TABLE languages ( + id integer NOT NULL, + language text NOT NULL +); + +ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id); + +-- Join table +CREATE TABLE pilot_languages ( + pilot_id integer NOT NULL, + language_id integer NOT NULL +); + +-- Composite primary key +ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id); +ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id); +ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id); diff --git a/examples/jets/sqlc.json b/examples/jets/sqlc.json new file mode 100644 index 0000000..789b34b --- /dev/null +++ b/examples/jets/sqlc.json @@ -0,0 +1,29 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "schema": "schema.sql", + "queries": "query-building.sql", + "engine": "postgresql", + "codegen": [ + { + "out": "postgres", + "plugin": "fsharp", + "options": { + "namespace": "Jets", + "async": false, + "type_affinity": true + } + } + ] + } + ] +} diff --git a/examples/ondeck/mysql/Models.fs b/examples/ondeck/mysql/Models.fs new file mode 100644 index 0000000..076ff57 --- /dev/null +++ b/examples/ondeck/mysql/Models.fs @@ -0,0 +1,22 @@ +// Code generated by sqlc. DO NOT EDIT. 
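+// As with the other MySQL examples, only models are emitted for this engine; Status references
+// the venues_status ENUM from the schema, which has no generated F# counterpart yet.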
+// version: sqlc v1.18.0 + +namespace Ondeck + +open System + +type City = { Slug: string; Name: string } + +type Venue = + { Id: int64 + Status: venues_status + Statuses: string option + Slug: string + Name: string + City: string + SpotifyPlaylist: string + SongkickId: string option + Tags: string option + CreatedAt: DateTimeOffset } + +type VenueCountByCityRow = { City: string; Count: int64 } diff --git a/examples/ondeck/mysql/Queries.fs b/examples/ondeck/mysql/Queries.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/ondeck/mysql/Readers.fs b/examples/ondeck/mysql/Readers.fs new file mode 100644 index 0000000..e69de29 diff --git a/examples/ondeck/mysql/query/city.sql b/examples/ondeck/mysql/query/city.sql new file mode 100644 index 0000000..c387e9d --- /dev/null +++ b/examples/ondeck/mysql/query/city.sql @@ -0,0 +1,23 @@ +/* name: ListCities :many */ +SELECT * +FROM city +ORDER BY name; + +/* name: GetCity :one */ +SELECT * +FROM city +WHERE slug = ?; + +/* name: CreateCity :exec */ +INSERT INTO city ( + name, + slug +) VALUES ( + ?, + ? +); + +/* name: UpdateCityName :exec */ +UPDATE city +SET name = ? +WHERE slug = ?; diff --git a/examples/ondeck/mysql/query/venue.sql b/examples/ondeck/mysql/query/venue.sql new file mode 100644 index 0000000..a1dd7a1 --- /dev/null +++ b/examples/ondeck/mysql/query/venue.sql @@ -0,0 +1,48 @@ +/* name: ListVenues :many */ +SELECT * +FROM venue +WHERE city = ? +ORDER BY name; + +/* name: DeleteVenue :exec */ +DELETE FROM venue +WHERE slug = ? AND slug = ?; + +/* name: GetVenue :one */ +SELECT * +FROM venue +WHERE slug = ? AND city = ?; + +/* name: CreateVenue :execresult */ +INSERT INTO venue ( + slug, + name, + city, + created_at, + spotify_playlist, + status, + statuses, + tags +) VALUES ( + ?, + ?, + ?, + NOW(), + ?, + ?, + ?, + ? +); + +/* name: UpdateVenueName :exec */ +UPDATE venue +SET name = ? 
+WHERE slug = ?; + +/* name: VenueCountByCity :many */ +SELECT + city, + count(*) +FROM venue +GROUP BY 1 +ORDER BY 1; diff --git a/examples/ondeck/mysql/schema/0001_city.sql b/examples/ondeck/mysql/schema/0001_city.sql new file mode 100644 index 0000000..6be35d1 --- /dev/null +++ b/examples/ondeck/mysql/schema/0001_city.sql @@ -0,0 +1,4 @@ +CREATE TABLE city ( + slug varchar(255) PRIMARY KEY, + name text NOT NULL +) diff --git a/examples/ondeck/mysql/schema/0002_venue.sql b/examples/ondeck/mysql/schema/0002_venue.sql new file mode 100644 index 0000000..4fc842c --- /dev/null +++ b/examples/ondeck/mysql/schema/0002_venue.sql @@ -0,0 +1,12 @@ +CREATE TABLE venues ( + id SERIAL primary key, + dropped text, + status ENUM('open', 'closed') not null COMMENT 'Venues can be either open or closed', + statuses text, -- status[], + slug text not null COMMENT 'This value appears in public URLs', + name varchar(255) not null, + city text not null references city(slug), + spotify_playlist varchar(255) not null, + songkick_id text, + tags text -- text[] +) COMMENT='Venues are places where muisc happens'; diff --git a/examples/ondeck/mysql/schema/0003_add_column.sql b/examples/ondeck/mysql/schema/0003_add_column.sql new file mode 100644 index 0000000..9b334bc --- /dev/null +++ b/examples/ondeck/mysql/schema/0003_add_column.sql @@ -0,0 +1,3 @@ +ALTER TABLE venues RENAME TO venue; +ALTER TABLE venue ADD COLUMN created_at TIMESTAMP NOT NULL DEFAULT NOW(); +ALTER TABLE venue DROP COLUMN dropped; diff --git a/examples/ondeck/postgresql/Models.fs b/examples/ondeck/postgresql/Models.fs new file mode 100644 index 0000000..c875aa7 --- /dev/null +++ b/examples/ondeck/postgresql/Models.fs @@ -0,0 +1,23 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Npgsql + +type City = { Slug: string; Name: string } + +type Venue = + { Id: int + Status: status + Statuses: status + Slug: string + Name: string + City: string + SpotifyPlaylist: string + SongkickId: string option + Tags: string option + CreatedAt: DateTime } + +type VenueCountByCityRow = { City: string; Count: int64 } diff --git a/examples/ondeck/postgresql/Queries.fs b/examples/ondeck/postgresql/Queries.fs new file mode 100644 index 0000000..03d4c56 --- /dev/null +++ b/examples/ondeck/postgresql/Queries.fs @@ -0,0 +1,218 @@ + +// Code generated by sqlc. DO NOT EDIT. 
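+// Comment lines placed under a query's name annotation in the .sql files are carried through as
+// /// doc comments on the corresponding DB member (see createCity below).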
+// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Npgsql +open Npgsql.FSharp +open Ondeck.Readers + +module Sqls = + + [] + let createCity = + """ + INSERT INTO city ( + name, + slug +) VALUES ( + @name, + @slug +) RETURNING slug, name + """ + + [] + let createVenue = + """ + INSERT INTO venue ( + slug, + name, + city, + created_at, + spotify_playlist, + status, + statuses, + tags +) VALUES ( + @slug, + @name, + @city, + NOW(), + @spotify_playlist, + @status, + @statuses, + @tags +) RETURNING id + """ + + [] + let deleteVenue = + """ + DELETE FROM venue +WHERE slug = @slug AND slug = @slug + """ + + [] + let getCity = + """ + SELECT slug, name +FROM city +WHERE slug = @slug + """ + + [] + let getVenue = + """ + SELECT id, status, statuses, slug, name, city, spotify_playlist, songkick_id, tags, created_at +FROM venue +WHERE slug = @slug AND city = @city + """ + + [] + let listCities = + """ + SELECT slug, name +FROM city +ORDER BY name + """ + + [] + let listVenues = + """ + SELECT id, status, statuses, slug, name, city, spotify_playlist, songkick_id, tags, created_at +FROM venue +WHERE city = @city +ORDER BY name + """ + + [] + let updateCityName = + """ + UPDATE city +SET name = @name +WHERE slug = @slug + """ + + [] + let updateVenueName = + """ + UPDATE venue +SET name = @name +WHERE slug = @slug +RETURNING id + """ + + [] + let venueCountByCity = + """ + SELECT + city, + count(*) +FROM venue +GROUP BY 1 +ORDER BY 1 + """ + +[] +type DB (conn: string) = + // https://www.connectionstrings.com/npgsql + + /// Create a new city. The slug must be unique. + /// This is the second line of the comment + /// This is the third line + member this.createCity (name: string, slug: string) = + + let parameters = [ ("name", Sql.text name); ("slug", Sql.text slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createCity + |> Sql.parameters parameters + |> Sql.executeRow cityReader + + member this.createVenue (slug: string, name: string, city: string, spotifyPlaylist: string, status: status, ?statuses: List<status>, ?tags: List<string option>) = + + let parameters = [ ("slug", Sql.text slug); ("name", Sql.string name); ("city", Sql.text city); ("spotify_playlist", Sql.string spotifyPlaylist); ("status", Sql.unhandled_report_issue status); ("statuses", Sql.unhandled_report_issue statuses); ("tags", Sql.textOrNone tags) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createVenue + |> Sql.parameters parameters + |> Sql.executeRow intReader + + member this.deleteVenue (slug: string) = + + let parameters = [ ("slug", Sql.text slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteVenue + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getCity (slug: string) = + + let parameters = [ ("slug", Sql.text slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getCity + |> Sql.parameters parameters + |> Sql.executeRow cityReader + + member this.getVenue (slug: string, city: string) = + + let parameters = [ ("slug", Sql.text slug); ("city", Sql.text city) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getVenue + |> Sql.parameters parameters + |> Sql.executeRow venueReader + + member this.listCities () = + + conn + |> Sql.connect + |> Sql.query Sqls.listCities + |> Sql.execute cityReader + + member this.listVenues (city: string) = + + let parameters = [ ("city", Sql.text city) ] + + conn + |> Sql.connect + |> Sql.query Sqls.listVenues + |> Sql.parameters parameters + |> Sql.execute venueReader + + member this.updateCityName (slug: string, name: string) = + + let parameters 
= [ ("slug", Sql.text slug); ("name", Sql.text name) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateCityName + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.updateVenueName (slug: string, name: string) = + + let parameters = [ ("slug", Sql.text slug); ("name", Sql.string name) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateVenueName + |> Sql.parameters parameters + |> Sql.executeRow intReader + + member this.venueCountByCity () = + + conn + |> Sql.connect + |> Sql.query Sqls.venueCountByCity + |> Sql.execute venueCountByCityRowReader + diff --git a/examples/ondeck/postgresql/Readers.fs b/examples/ondeck/postgresql/Readers.fs new file mode 100644 index 0000000..29d0828 --- /dev/null +++ b/examples/ondeck/postgresql/Readers.fs @@ -0,0 +1,30 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Npgsql +open Npgsql.FSharp + +module Readers = + + let venueCountByCityRowReader (r: RowReader) : VenueCountByCityRow = + { VenueCountByCityRow.City = r.text "city" + Count = r.int64 "count" } + + let cityReader (r: RowReader) : City = + { City.Slug = r.text "slug" + Name = r.text "name" } + + let venueReader (r: RowReader) : Venue = + { Venue.Id = r.int "id" + Status = r.unhandled_report_issue "status" + Statuses = r.unhandled_report_issue "statuses" + Slug = r.text "slug" + Name = r.string "name" + City = r.text "city" + SpotifyPlaylist = r.string "spotify_playlist" + SongkickId = r.textOrNone "songkick_id" + Tags = r.textOrNone "tags" + CreatedAt = r.timestamp "created_at" } diff --git a/examples/ondeck/postgresql/query/city.sql b/examples/ondeck/postgresql/query/city.sql new file mode 100644 index 0000000..f34dc99 --- /dev/null +++ b/examples/ondeck/postgresql/query/city.sql @@ -0,0 +1,26 @@ +-- name: ListCities :many +SELECT * +FROM city +ORDER BY name; + +-- name: GetCity :one +SELECT * +FROM city +WHERE slug = $1; + +-- name: CreateCity :one +-- Create a new city. The slug must be unique. 
+-- This is the second line of the comment +-- This is the third line +INSERT INTO city ( + name, + slug +) VALUES ( + $1, + $2 +) RETURNING *; + +-- name: UpdateCityName :exec +UPDATE city +SET name = $2 +WHERE slug = $1; diff --git a/examples/ondeck/postgresql/query/venue.sql b/examples/ondeck/postgresql/query/venue.sql new file mode 100644 index 0000000..8c6bd02 --- /dev/null +++ b/examples/ondeck/postgresql/query/venue.sql @@ -0,0 +1,49 @@ +-- name: ListVenues :many +SELECT * +FROM venue +WHERE city = $1 +ORDER BY name; + +-- name: DeleteVenue :exec +DELETE FROM venue +WHERE slug = $1 AND slug = $1; + +-- name: GetVenue :one +SELECT * +FROM venue +WHERE slug = $1 AND city = $2; + +-- name: CreateVenue :one +INSERT INTO venue ( + slug, + name, + city, + created_at, + spotify_playlist, + status, + statuses, + tags +) VALUES ( + $1, + $2, + $3, + NOW(), + $4, + $5, + $6, + $7 +) RETURNING id; + +-- name: UpdateVenueName :one +UPDATE venue +SET name = $2 +WHERE slug = $1 +RETURNING id; + +-- name: VenueCountByCity :many +SELECT + city, + count(*) +FROM venue +GROUP BY 1 +ORDER BY 1; diff --git a/examples/ondeck/postgresql/schema/0001_city.sql b/examples/ondeck/postgresql/schema/0001_city.sql new file mode 100644 index 0000000..af38f16 --- /dev/null +++ b/examples/ondeck/postgresql/schema/0001_city.sql @@ -0,0 +1,4 @@ +CREATE TABLE city ( + slug text PRIMARY KEY, + name text NOT NULL +) diff --git a/examples/ondeck/postgresql/schema/0002_venue.sql b/examples/ondeck/postgresql/schema/0002_venue.sql new file mode 100644 index 0000000..940de7a --- /dev/null +++ b/examples/ondeck/postgresql/schema/0002_venue.sql @@ -0,0 +1,18 @@ +CREATE TYPE status AS ENUM ('op!en', 'clo@sed'); +COMMENT ON TYPE status IS 'Venues can be either open or closed'; + +CREATE TABLE venues ( + id SERIAL primary key, + dropped text, + status status not null, + statuses status[], + slug text not null, + name varchar(255) not null, + city text not null references city(slug), + spotify_playlist varchar not null, + songkick_id text, + tags text[] +); +COMMENT ON TABLE venues IS 'Venues are places where muisc happens'; +COMMENT ON COLUMN venues.slug IS 'This value appears in public URLs'; + diff --git a/examples/ondeck/postgresql/schema/0003_add_column.sql b/examples/ondeck/postgresql/schema/0003_add_column.sql new file mode 100644 index 0000000..9b334bc --- /dev/null +++ b/examples/ondeck/postgresql/schema/0003_add_column.sql @@ -0,0 +1,3 @@ +ALTER TABLE venues RENAME TO venue; +ALTER TABLE venue ADD COLUMN created_at TIMESTAMP NOT NULL DEFAULT NOW(); +ALTER TABLE venue DROP COLUMN dropped; diff --git a/examples/ondeck/sqlc.json b/examples/ondeck/sqlc.json new file mode 100644 index 0000000..1bb93b8 --- /dev/null +++ b/examples/ondeck/sqlc.json @@ -0,0 +1,61 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "schema": "postgresql/schema", + "queries": "postgresql/query", + "engine": "postgresql", + "codegen": [ + { + "out": "postgresql", + "plugin": "fsharp", + "options": { + "namespace": "Ondeck", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "mysql/schema", + "queries": "mysql/query", + "engine": "mysql", + "codegen": [ + { + "out": "mysql", + "plugin": "fsharp", + "options": { + "namespace": "Ondeck", + "async": false, + "type_affinity": true + } + } + ] + }, + { + "schema": "sqlite/schema", + "queries": "sqlite/query", + "engine": "sqlite", + "codegen": [ + { + "out": "sqlite", + "plugin": 
"fsharp", + "options": { + "namespace": "Ondeck", + "async": false, + "type_affinity": true + } + } + ] + } + ] +} diff --git a/examples/ondeck/sqlite/Models.fs b/examples/ondeck/sqlite/Models.fs new file mode 100644 index 0000000..fa973e3 --- /dev/null +++ b/examples/ondeck/sqlite/Models.fs @@ -0,0 +1,23 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Fumble + +type City = { Slug: string; Name: string } + +type Venue = + { Id: int + Status: string + Statuses: string option + Slug: string + Name: string + City: string + SpotifyPlaylist: string + SongkickId: string option + Tags: string option + CreatedAt: DateTime } + +type VenueCountByCityRow = { City: string; Count: int } diff --git a/examples/ondeck/sqlite/Queries.fs b/examples/ondeck/sqlite/Queries.fs new file mode 100644 index 0000000..8f0c197 --- /dev/null +++ b/examples/ondeck/sqlite/Queries.fs @@ -0,0 +1,227 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Fumble +open Ondeck.Readers + +module Sql = Sqlite +type Sql = Sqlite + +module Sqls = + + [] + let createCity = + """ + INSERT INTO city ( + name, + slug +) VALUES ( + ?, + ? +) + """ + + [] + let createVenue = + """ + INSERT INTO venue ( + slug, + name, + city, + created_at, + spotify_playlist, + status, + statuses, + tags +) VALUES ( + ?, + ?, + ?, + CURRENT_TIMESTAMP, + ?, + ?, + ?, + ? +) + """ + + [] + let deleteVenue = + """ + DELETE FROM venue +WHERE slug = ? AND slug = ? + """ + + [] + let getCity = + """ + SELECT slug, name +FROM city +WHERE slug = ? + """ + + [] + let getVenue = + """ + SELECT id, status, statuses, slug, name, city, spotify_playlist, songkick_id, tags, created_at +FROM venue +WHERE slug = ? AND city = ? + """ + + [] + let listCities = + """ + SELECT slug, name +FROM city +ORDER BY name + """ + + [] + let listVenues = + """ + SELECT id, status, statuses, slug, name, city, spotify_playlist, songkick_id, tags, created_at +FROM venue +WHERE city = ? +ORDER BY name + """ + + [] + let updateCityName = + """ + UPDATE city +SET name = ? +WHERE slug = ? + """ + + [] + let updateVenueName = + """ + UPDATE venue +SET name = ? +WHERE slug = ? 
+ """ + + [] + let venueCountByCity = + """ + SELECT + city, + count(*) +FROM venue +GROUP BY 1 +ORDER BY 1 + """ + +[] +type DB(conn: string) = + // https://www.connectionstrings.com/sqlite-net-provider + + member this.createCity(name: string, slug: string) = + + let parameters = [ ("name", Sql.string name); ("slug", Sql.string slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createCity + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.createVenue + ( + slug: string, + name: string, + city: string, + spotifyPlaylist: string, + status: string, + ?statuses: string, + ?tags: string + ) = + + let parameters = + [ ("slug", Sql.string slug) + ("name", Sql.string name) + ("city", Sql.string city) + ("spotify_playlist", Sql.string spotifyPlaylist) + ("status", Sql.string status) + ("statuses", Sql.stringOrNone statuses) + ("tags", Sql.stringOrNone tags) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createVenue + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.deleteVenue(slug: string, slug_2: string) = + + let parameters = [ ("slug", Sql.string slug); ("slug", Sql.string slug_2) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteVenue + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getCity(slug: string) = + + let parameters = [ ("slug", Sql.string slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getCity + |> Sql.parameters parameters + |> Sql.execute cityReader + + member this.getVenue(slug: string, city: string) = + + let parameters = [ ("slug", Sql.string slug); ("city", Sql.string city) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getVenue + |> Sql.parameters parameters + |> Sql.execute venueReader + + member this.listCities() = + + conn |> Sql.connect |> Sql.query Sqls.listCities |> Sql.execute cityReader + + member this.listVenues(city: string) = + + let parameters = [ ("city", Sql.string city) ] + + conn + |> Sql.connect + |> Sql.query Sqls.listVenues + |> Sql.parameters parameters + |> Sql.execute venueReader + + member this.updateCityName(name: string, slug: string) = + + let parameters = [ ("name", Sql.string name); ("slug", Sql.string slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateCityName + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.updateVenueName(name: string, slug: string) = + + let parameters = [ ("name", Sql.string name); ("slug", Sql.string slug) ] + + conn + |> Sql.connect + |> Sql.query Sqls.updateVenueName + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.venueCountByCity() = + + conn + |> Sql.connect + |> Sql.query Sqls.venueCountByCity + |> Sql.execute venueCountByCityRowReader diff --git a/examples/ondeck/sqlite/Readers.fs b/examples/ondeck/sqlite/Readers.fs new file mode 100644 index 0000000..5d31853 --- /dev/null +++ b/examples/ondeck/sqlite/Readers.fs @@ -0,0 +1,31 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// version: sqlc v1.18.0 + +namespace Ondeck + +open System +open Fumble + +type RowReader = SqliteRowReader + +module Readers = + + let cityReader (r: RowReader) : City = + { City.Slug = r.string "slug" + Name = r.string "name" } + + let venueReader (r: RowReader) : Venue = + { Venue.Id = r.int "id" + Status = r.string "status" + Statuses = r.stringOrNone "statuses" + Slug = r.string "slug" + Name = r.string "name" + City = r.string "city" + SpotifyPlaylist = r.string "spotify_playlist" + SongkickId = r.stringOrNone "songkick_id" + Tags = r.stringOrNone "tags" + CreatedAt = r.dateTime "created_at" } + + let venueCountByCityRowReader (r: RowReader) : VenueCountByCityRow = + { VenueCountByCityRow.City = r.string "city" + Count = r.int "count" } diff --git a/examples/ondeck/sqlite/query/city.sql b/examples/ondeck/sqlite/query/city.sql new file mode 100644 index 0000000..c387e9d --- /dev/null +++ b/examples/ondeck/sqlite/query/city.sql @@ -0,0 +1,23 @@ +/* name: ListCities :many */ +SELECT * +FROM city +ORDER BY name; + +/* name: GetCity :one */ +SELECT * +FROM city +WHERE slug = ?; + +/* name: CreateCity :exec */ +INSERT INTO city ( + name, + slug +) VALUES ( + ?, + ? +); + +/* name: UpdateCityName :exec */ +UPDATE city +SET name = ? +WHERE slug = ?; diff --git a/examples/ondeck/sqlite/query/venue.sql b/examples/ondeck/sqlite/query/venue.sql new file mode 100644 index 0000000..b4f5fd4 --- /dev/null +++ b/examples/ondeck/sqlite/query/venue.sql @@ -0,0 +1,48 @@ +/* name: ListVenues :many */ +SELECT * +FROM venue +WHERE city = ? +ORDER BY name; + +/* name: DeleteVenue :exec */ +DELETE FROM venue +WHERE slug = ? AND slug = ?; + +/* name: GetVenue :one */ +SELECT * +FROM venue +WHERE slug = ? AND city = ?; + +/* name: CreateVenue :execresult */ +INSERT INTO venue ( + slug, + name, + city, + created_at, + spotify_playlist, + status, + statuses, + tags +) VALUES ( + ?, + ?, + ?, + CURRENT_TIMESTAMP, + ?, + ?, + ?, + ? +); + +/* name: UpdateVenueName :exec */ +UPDATE venue +SET name = ? 
+WHERE slug = ?; + +/* name: VenueCountByCity :many */ +SELECT + city, + count(*) +FROM venue +GROUP BY 1 +ORDER BY 1; diff --git a/examples/ondeck/sqlite/schema/0001_city.sql b/examples/ondeck/sqlite/schema/0001_city.sql new file mode 100644 index 0000000..6be35d1 --- /dev/null +++ b/examples/ondeck/sqlite/schema/0001_city.sql @@ -0,0 +1,4 @@ +CREATE TABLE city ( + slug varchar(255) PRIMARY KEY, + name text NOT NULL +) diff --git a/examples/ondeck/sqlite/schema/0002_venue.sql b/examples/ondeck/sqlite/schema/0002_venue.sql new file mode 100644 index 0000000..e57166e --- /dev/null +++ b/examples/ondeck/sqlite/schema/0002_venue.sql @@ -0,0 +1,13 @@ +CREATE TABLE venues ( + id integer primary key AUTOINCREMENT, + dropped text, + status text not null, + statuses text, -- status[] + slug text not null, + name varchar(255) not null, + city text not null references city(slug), + spotify_playlist varchar(255) not null, + songkick_id text, + tags text, -- tags[] + CHECK (status = 'open' OR status = 'closed') +); diff --git a/examples/ondeck/sqlite/schema/0003_add_column.sql b/examples/ondeck/sqlite/schema/0003_add_column.sql new file mode 100644 index 0000000..7d7a644 --- /dev/null +++ b/examples/ondeck/sqlite/schema/0003_add_column.sql @@ -0,0 +1,3 @@ +ALTER TABLE venues RENAME TO venue; +ALTER TABLE venue ADD COLUMN created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP; +ALTER TABLE venue DROP COLUMN dropped; diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..37cd9bb --- /dev/null +++ b/go.mod @@ -0,0 +1,16 @@ +module github.com/kaashyapan/sqlc-gen-fsharp + +go 1.19 + +require ( + github.com/jinzhu/inflection v1.0.0 + github.com/mailru/easyjson v0.7.7 + github.com/tabbed/sqlc-go v1.16.0 +) + +require ( + github.com/josharian/intern v1.0.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/quicktemplate v1.7.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..09e93a5 --- /dev/null +++ b/go.sum @@ -0,0 +1,36 @@ +github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/tabbed/sqlc-go v1.16.0 h1:EwPBXdGn5tyrLjcNiHRoQthWvJeF5NjG9Cx1WK5iFsY= +github.com/tabbed/sqlc-go v1.16.0/go.mod h1:mqMU5duZRGz5Wp/qJXwkERf+MXgGOZ8BmW/tH9KyvWA= +github.com/valyala/bytebufferpool v1.0.0 
h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.30.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus= +github.com/valyala/quicktemplate v1.7.0 h1:LUPTJmlVcb46OOUY3IeD9DojFpAVbsG+5WFTcjMJzCM= +github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= diff --git a/internal/core/config.go b/internal/core/config.go new file mode 100644 index 0000000..d67ff90 --- /dev/null +++ b/internal/core/config.go @@ -0,0 +1,10 @@ +package core + +type Config struct { + Package string `json:"namespace"` + EmitExactTableNames bool `json:"emit_exact_table_names"` + Async bool `json:"async"` + EmitClasses bool `json:"emit_classes"` + TypeAffinity bool `json:"type_affinity" default:"true"` + InflectionExcludeTableNames []string `json:"inflection_exclude_table_names"` +} diff --git a/internal/core/config_easyjson.go b/internal/core/config_easyjson.go new file mode 100644 index 0000000..a2607a1 --- /dev/null +++ b/internal/core/config_easyjson.go @@ -0,0 +1,152 @@ +// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. 
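+// The decoder below maps the "options" object from sqlc.json onto core.Config
+// without reflection, e.g. {"namespace": "Ondeck", "async": false,
+// "type_affinity": true} fills Package, Async and TypeAffinity. It is invoked
+// from MakeConfig in gen.go via easyjson.Unmarshal(req.PluginOptions, &conf).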
+ +package core + +import ( + json "encoding/json" + easyjson "github.com/mailru/easyjson" + jlexer "github.com/mailru/easyjson/jlexer" + jwriter "github.com/mailru/easyjson/jwriter" +) + +// suppress unused package warning +var ( + _ *json.RawMessage + _ *jlexer.Lexer + _ *jwriter.Writer + _ easyjson.Marshaler +) + +func easyjson6615c02eDecodeGithubComKaashyapanSqlcGenFsharpInternalCore(in *jlexer.Lexer, out *Config) { + isTopLevel := in.IsStart() + if in.IsNull() { + if isTopLevel { + in.Consumed() + } + in.Skip() + return + } + in.Delim('{') + for !in.IsDelim('}') { + key := in.UnsafeFieldName(false) + in.WantColon() + if in.IsNull() { + in.Skip() + in.WantComma() + continue + } + switch key { + case "namespace": + out.Package = string(in.String()) + case "emit_exact_table_names": + out.EmitExactTableNames = bool(in.Bool()) + case "async": + out.Async = bool(in.Bool()) + case "emit_classes": + out.EmitClasses = bool(in.Bool()) + case "type_affinity": + out.TypeAffinity = bool(in.Bool()) + case "inflection_exclude_table_names": + if in.IsNull() { + in.Skip() + out.InflectionExcludeTableNames = nil + } else { + in.Delim('[') + if out.InflectionExcludeTableNames == nil { + if !in.IsDelim(']') { + out.InflectionExcludeTableNames = make([]string, 0, 4) + } else { + out.InflectionExcludeTableNames = []string{} + } + } else { + out.InflectionExcludeTableNames = (out.InflectionExcludeTableNames)[:0] + } + for !in.IsDelim(']') { + var v1 string + v1 = string(in.String()) + out.InflectionExcludeTableNames = append(out.InflectionExcludeTableNames, v1) + in.WantComma() + } + in.Delim(']') + } + default: + in.SkipRecursive() + } + in.WantComma() + } + in.Delim('}') + if isTopLevel { + in.Consumed() + } +} +func easyjson6615c02eEncodeGithubComKaashyapanSqlcGenFsharpInternalCore(out *jwriter.Writer, in Config) { + out.RawByte('{') + first := true + _ = first + { + const prefix string = ",\"namespace\":" + out.RawString(prefix[1:]) + out.String(string(in.Package)) + } + { + const prefix string = ",\"emit_exact_table_names\":" + out.RawString(prefix) + out.Bool(bool(in.EmitExactTableNames)) + } + { + const prefix string = ",\"async\":" + out.RawString(prefix) + out.Bool(bool(in.Async)) + } + { + const prefix string = ",\"emit_classes\":" + out.RawString(prefix) + out.Bool(bool(in.EmitClasses)) + } + { + const prefix string = ",\"type_affinity\":" + out.RawString(prefix) + out.Bool(bool(in.TypeAffinity)) + } + { + const prefix string = ",\"inflection_exclude_table_names\":" + out.RawString(prefix) + if in.InflectionExcludeTableNames == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { + out.RawString("null") + } else { + out.RawByte('[') + for v2, v3 := range in.InflectionExcludeTableNames { + if v2 > 0 { + out.RawByte(',') + } + out.String(string(v3)) + } + out.RawByte(']') + } + } + out.RawByte('}') +} + +// MarshalJSON supports json.Marshaler interface +func (v Config) MarshalJSON() ([]byte, error) { + w := jwriter.Writer{} + easyjson6615c02eEncodeGithubComKaashyapanSqlcGenFsharpInternalCore(&w, v) + return w.Buffer.BuildBytes(), w.Error +} + +// MarshalEasyJSON supports easyjson.Marshaler interface +func (v Config) MarshalEasyJSON(w *jwriter.Writer) { + easyjson6615c02eEncodeGithubComKaashyapanSqlcGenFsharpInternalCore(w, v) +} + +// UnmarshalJSON supports json.Unmarshaler interface +func (v *Config) UnmarshalJSON(data []byte) error { + r := jlexer.Lexer{Data: data} + easyjson6615c02eDecodeGithubComKaashyapanSqlcGenFsharpInternalCore(&r, v) + return r.Error() +} + +// UnmarshalEasyJSON supports 
easyjson.Unmarshaler interface +func (v *Config) UnmarshalEasyJSON(l *jlexer.Lexer) { + easyjson6615c02eDecodeGithubComKaashyapanSqlcGenFsharpInternalCore(l, v) +} diff --git a/internal/core/gen.go b/internal/core/gen.go new file mode 100644 index 0000000..04969a9 --- /dev/null +++ b/internal/core/gen.go @@ -0,0 +1,696 @@ +package core + +import ( + "bytes" + "errors" + "fmt" + "regexp" + "sort" + "strconv" + "strings" + + easyjson "github.com/mailru/easyjson" + plugin "github.com/tabbed/sqlc-go/codegen" + "github.com/tabbed/sqlc-go/metadata" + "github.com/tabbed/sqlc-go/sdk" + + "github.com/kaashyapan/sqlc-gen-fsharp/internal/inflection" +) + +var fsIdentPattern = regexp.MustCompile("[^a-zA-Z0-9_]+") + +type Constant struct { + Name string + Type string + Value string +} + +type Enum struct { + Name string + Comment string + Constants []Constant +} + +type Field struct { + ID int + Name string + Type fsType + Comment string +} + +type Struct struct { + Table plugin.Identifier + Name string + Fields []Field + Comment string +} + +type QueryValue struct { + Emit bool + Name string + Struct *Struct + Typ fsType +} + +func (v QueryValue) EmitStruct() bool { + return v.Emit +} + +func (v QueryValue) IsStruct() bool { + return v.Struct != nil +} + +func (v QueryValue) isEmpty() bool { + return v.Typ == (fsType{}) && v.Name == "" && v.Struct == nil +} + +func (v QueryValue) Type() string { + if v.Typ != (fsType{}) { + return v.Typ.String() + } + if v.Struct != nil { + return v.Struct.Name + } + panic("no type for QueryValue: " + v.Name) +} + +type Params struct { + Struct *Struct + binding []int +} + +func (v Params) isEmpty() bool { + return len(v.Struct.Fields) == 0 +} + +func (v Params) Args() string { + if v.isEmpty() { + return "" + } + var requiredArgs []string + var optionalArgs []string + fields := v.Struct.Fields + for _, f := range fields { + + if f.Type.IsNull { + typ := strings.TrimSuffix(f.Type.String(), " option") + optionalArgs = append(optionalArgs, "?"+f.Name+": "+typ) + } else { + requiredArgs = append(requiredArgs, f.Name+": "+f.Type.String()) + } + + } + + out := append(requiredArgs, optionalArgs...) 
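+	// Required parameters come first, then nullable columns as trailing
+	// optional arguments, because F# members only allow optional parameters
+	// after all required ones. For CreateVenue this produces:
+	//   slug: string, name: string, city: string, spotifyPlaylist: string,
+	//   status: string, ?statuses: string, ?tags: string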
+ return strings.Join(out, ", ") +} + +func (v Params) Bindings(engine string) []string { + var out []string + + for _, f := range v.Struct.Fields { + item := fmt.Sprintf(`("%s", Sql.%s %s)`, f.Type.DbName, f.Type.LibTyp, f.Name) + out = append(out, item) + } + + return out +} + +func indent(s string, n int, firstIndent int) string { + lines := strings.Split(s, "\n") + buf := bytes.NewBuffer(nil) + for i, l := range lines { + indent := n + if i == 0 && firstIndent != -1 { + indent = firstIndent + } + if i != 0 { + buf.WriteRune('\n') + } + for i := 0; i < indent; i++ { + buf.WriteRune(' ') + } + buf.WriteString(l) + } + return buf.String() +} + +// A struct used to generate methods and fields on the Queries struct +type Query struct { + ClassName string + Cmd string + Comments []string + MethodName string + FieldName string + ConstantName string + SQL string + SourceName string + Ret QueryValue + Arg Params +} + +func fsEnumValueName(value string) string { + id := strings.Replace(value, "-", "_", -1) + id = strings.Replace(id, ":", "_", -1) + id = strings.Replace(id, "/", "_", -1) + id = fsIdentPattern.ReplaceAllString(id, "") + return strings.ToUpper(id) +} + +func BuildEnums(req *plugin.CodeGenRequest) []Enum { + var enums []Enum + for _, schema := range req.Catalog.Schemas { + if schema.Name == "pg_catalog" || schema.Name == "information_schema" { + continue + } + for _, enum := range schema.Enums { + var enumName string + if schema.Name == req.Catalog.DefaultSchema { + enumName = enum.Name + } else { + enumName = schema.Name + "_" + enum.Name + } + e := Enum{ + Name: dataClassName(enumName, req.Settings), + Comment: enum.Comment, + } + for _, v := range enum.Vals { + e.Constants = append(e.Constants, Constant{ + Name: fsEnumValueName(v), + Value: v, + Type: e.Name, + }) + } + enums = append(enums, e) + } + } + if len(enums) > 0 { + sort.Slice(enums, func(i, j int) bool { return enums[i].Name < enums[j].Name }) + } + return enums +} + +func dataClassName(name string, settings *plugin.Settings) string { + if rename := settings.Rename[name]; rename != "" { + return rename + } + out := "" + for _, p := range strings.Split(name, "_") { + out += sdk.Title(p) + } + return out +} + +func memberName(name string, settings *plugin.Settings) string { + return sdk.LowerTitle(dataClassName(name, settings)) +} + +func BuildDataClasses(conf Config, req *plugin.CodeGenRequest) []Struct { + var structs []Struct + for _, schema := range req.Catalog.Schemas { + if schema.Name == "pg_catalog" || schema.Name == "information_schema" { + continue + } + for _, table := range schema.Tables { + var tableName string + if schema.Name == req.Catalog.DefaultSchema { + tableName = table.Rel.Name + } else { + tableName = schema.Name + "_" + table.Rel.Name + } + structName := dataClassName(tableName, req.Settings) + if !conf.EmitExactTableNames { + structName = inflection.Singular(inflection.SingularParams{ + Name: structName, + Exclusions: conf.InflectionExcludeTableNames, + }) + } + s := Struct{ + Table: plugin.Identifier{Schema: schema.Name, Name: table.Rel.Name}, + Name: structName, + Comment: table.Comment, + } + for _, column := range table.Columns { + s.Fields = append(s.Fields, Field{ + Name: memberName(column.Name, req.Settings), + Type: makeType(req, column), + Comment: column.Comment, + }) + } + structs = append(structs, s) + } + } + return structs +} + +type fsType struct { + Name string + LibTyp string + ReaderTyp string + DbName string + IsEnum bool + IsArray bool + IsNull bool + DataType string + Engine 
string +} + +func (t fsType) String() string { + v := t.Name + if t.IsArray { + v = fmt.Sprintf("List<%s>", v) + } + return v +} + +func (t fsType) IsTime() bool { + return t.Name == "LocalDate" || t.Name == "LocalDateTime" || t.Name == "LocalTime" || t.Name == "OffsetDateTime" +} + +func (t fsType) IsInstant() bool { + return t.Name == "Instant" +} + +func (t fsType) IsUUID() bool { + return t.Name == "UUID" +} + +func makeType(req *plugin.CodeGenRequest, col *plugin.Column) fsType { + fstyp, readerTyp, libTyp, isEnum := fsInnerType(req, col) + return fsType{ + Name: fstyp, + LibTyp: libTyp, + ReaderTyp: readerTyp, + DbName: col.Name, + IsEnum: isEnum, + IsArray: col.IsArray, + IsNull: !col.NotNull, + DataType: sdk.DataType(col.Type), + Engine: req.Settings.Engine, + } +} + +func fsInnerType(req *plugin.CodeGenRequest, col *plugin.Column) (string, string, string, bool) { + // TODO: Extend the engine interface to handle types + switch req.Settings.Engine { + case "mysql": + return mysqlType(req, col) + case "postgresql": + return postgresType(req, col) + case "sqlite": + return sqliteType(req, col) + default: + return "any", "any", "any", false + } +} + +type goColumn struct { + id int + *plugin.Column +} + +func fsColumnsToStruct(req *plugin.CodeGenRequest, name string, columns []goColumn, namer func(*plugin.Column, int) string) *Struct { + gs := Struct{ + Name: name, + } + idSeen := map[int]Field{} + nameSeen := map[string]int{} + for _, c := range columns { + if _, ok := idSeen[c.id]; ok { + continue + } + fieldName := memberName(namer(c.Column, c.id), req.Settings) + if v := nameSeen[c.Name]; v > 0 { + fieldName = fmt.Sprintf("%s_%d", fieldName, v+1) + } + field := Field{ + ID: c.id, + Name: fieldName, + Type: makeType(req, c.Column), + } + gs.Fields = append(gs.Fields, field) + nameSeen[c.Name]++ + idSeen[c.id] = field + } + return &gs +} + +func fsArgName(name string) string { + out := "" + for i, p := range strings.Split(name, "_") { + if i == 0 { + out += strings.ToLower(p) + } else { + out += sdk.Title(p) + } + } + return out +} + +func fsParamName(c *plugin.Column, number int) string { + if c.Name != "" { + return fsArgName(c.Name) + } + return fmt.Sprintf("dollar_%d", number) +} + +func fsColumnName(c *plugin.Column, pos int) string { + if c.Name != "" { + return c.Name + } + return fmt.Sprintf("column_%d", pos+1) +} + +// HACK: jdbc doesn't support numbered parameters, so we need to transform them to question marks... +// But there's no access to the SQL parser here, so we just do a dumb regexp replace instead. This won't work if +// the literal strings contain matching values, but good enough for a prototype. 
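+//
+// In this F# plugin jdbcSQL is a no-op: numbered PostgreSQL parameters
+// ($1, $2, ...) are rewritten to named parameters by reformatSqlParamNames
+// below (e.g. "WHERE slug = $1" becomes "WHERE slug = @slug"), and the
+// SQLite queries already use '?' placeholders.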
+func jdbcSQL(s, engine string) (string, []string) { + return s, nil +} + +// Converts $1 to @id +func reformatSqlParamNames(q Query) string { + rawQuery := q.SQL + if q.Arg.isEmpty() { + return rawQuery + } + + if len(q.Arg.binding) > 0 { + for _, idx := range q.Arg.binding { + f := q.Arg.Struct.Fields[idx-1] + token := `\$` + strconv.Itoa(idx+1) + `\b` + regx := regexp.MustCompile(token) + newToken := "@" + f.Type.DbName + rawQuery = regx.ReplaceAllString(rawQuery, newToken) + } + } else { + for i, f := range q.Arg.Struct.Fields { + token := `\$` + strconv.Itoa(i+1) + `\b` + regx := regexp.MustCompile(token) + newToken := "@" + f.Type.DbName + rawQuery = regx.ReplaceAllString(rawQuery, newToken) + } + } + + return rawQuery + +} + +// provide initial connection string +func (t TmplCtx) ConnString() []string { + if t.Settings.Engine == "postgresql" { + out := []string{"// https://www.connectionstrings.com/npgsql"} + return out + } + if t.Settings.Engine == "sqlite" { + out := []string{"// https://www.connectionstrings.com/sqlite-net-provider"} + return out + } + return nil +} + +// provide initial connection string +func (t TmplCtx) ConnPipeline(q Query) []string { + out := []string{} + argCnt := len(q.Arg.Bindings(t.Settings.Engine)) + + if argCnt > 0 { + paramstr := fmt.Sprintf("let parameters = [ %s ]", strings.Join(q.Arg.Bindings(t.Settings.Engine), "; ")) + out = append(out, paramstr) + out = append(out, "") + } + + out = append(out, "conn") + out = append(out, "|> Sql.connect") + out = append(out, "|> Sql.query Sqls."+q.ConstantName) + + if argCnt > 0 { + out = append(out, "|> Sql.parameters parameters") + } + out = append(out, "|> Sql."+ExecCommand(t, q)) + + return out +} + +func parseInts(s []string) ([]int, error) { + if len(s) == 0 { + return nil, nil + } + var refs []int + for _, v := range s { + i, err := strconv.Atoi(strings.TrimPrefix(v, "$")) + if err != nil { + return nil, err + } + refs = append(refs, i) + } + return refs, nil +} + +func BuildQueries(req *plugin.CodeGenRequest, structs []Struct) ([]Query, error) { + qs := make([]Query, 0, len(req.Queries)) + for _, query := range req.Queries { + if query.Name == "" { + continue + } + if query.Cmd == "" { + continue + } + if query.Cmd == metadata.CmdCopyFrom { + return nil, errors.New("Support for CopyFrom in fsharp is not implemented") + } + + ql, args := jdbcSQL(query.Text, req.Settings.Engine) + refs, err := parseInts(args) + if err != nil { + return nil, fmt.Errorf("Invalid parameter reference: %w", err) + } + gq := Query{ + Cmd: query.Cmd, + ClassName: sdk.Title(query.Name), + ConstantName: sdk.LowerTitle(query.Name), + FieldName: sdk.Title(query.Name), + MethodName: sdk.LowerTitle(query.Name), + SourceName: query.Filename, + SQL: ql, + Comments: query.GetComments(), + } + + var cols []goColumn + for _, p := range query.Params { + cols = append(cols, goColumn{ + id: int(p.Number), + Column: p.Column, + }) + } + params := fsColumnsToStruct(req, gq.ClassName+"Bindings", cols, fsParamName) + gq.Arg = Params{ + Struct: params, + binding: refs, + } + + if len(query.Columns) == 1 { + c := query.Columns[0] + gq.Ret = QueryValue{ + Name: "results", + Typ: makeType(req, c), + } + } else if len(query.Columns) > 1 { + var gs *Struct + var emit bool + + for _, s := range structs { + if len(s.Fields) != len(query.Columns) { + continue + } + same := true + for i, f := range s.Fields { + c := query.Columns[i] + sameName := f.Name == memberName(fsColumnName(c, i), req.Settings) + sameType := f.Type == makeType(req, c) + sameTable := 
sdk.SameTableName(c.Table, &s.Table, req.Catalog.DefaultSchema) + + if !sameName || !sameType || !sameTable { + same = false + } + } + if same { + gs = &s + break + } + } + + if gs == nil { + var columns []goColumn + for i, c := range query.Columns { + columns = append(columns, goColumn{ + id: i, + Column: c, + }) + } + gs = fsColumnsToStruct(req, gq.ClassName+"Row", columns, fsColumnName) + emit = true + } + gq.Ret = QueryValue{ + Emit: emit, + Name: "results", + Struct: gs, + } + } + + gq.SQL = reformatSqlParamNames(gq) + + qs = append(qs, gq) + } + sort.Slice(qs, func(i, j int) bool { return qs[i].MethodName < qs[j].MethodName }) + return qs, nil +} + +type TmplCtx struct { + Q string + Package string + Enums []Enum + DataClasses []Struct + Queries []Query + Settings *plugin.Settings + SqlcVersion string + // TODO: Race conditions + SourceName string + + Configuration Config + EmitJSONTags bool + EmitPreparedQueries bool + EmitInterface bool +} + +func makeReaderString(lookup map[string]Query, t TmplCtx) []string { + var readers []string + var fields []string + + for _, v := range lookup { + fields = []string{} + //cnt := len(v.Ret.Struct.Fields) + for _, item := range v.Ret.Struct.Fields { + field := fmt.Sprintf(`%s = r.%s "%s"`, sdk.Title(item.Name), item.Type.ReaderTyp, item.Type.DbName) + fields = append(fields, field) + } + recstr := strings.Join(fields, " ; ") + str := fmt.Sprintf(`let %sReader (r: RowReader) : %s = { %s.%s }`, sdk.LowerTitle(v.Ret.Type()), v.Ret.Type(), v.Ret.Type(), recstr) + readers = append(readers, str) + } + return readers + +} + +func (v TmplCtx) ReaderSet() []string { + + lookup := map[string]Query{} + if eval := v.Queries; len(eval) != 0 { + for _, dot := range eval { + _ = dot + if dot.Cmd == ":one" || dot.Cmd == ":many" { + if dot.Ret.IsStruct() { + lookup[dot.Ret.Type()] = dot + } + } + } + } + + return makeReaderString(lookup, v) +} + +func (v TmplCtx) ExtraModels() []string { + + lookup := map[string]Query{} + + if eval := v.DataClasses; len(eval) != 0 { + } + if eval := v.Queries; len(eval) != 0 { + for _, dot := range eval { + _ = dot + if dot.Cmd == ":one" || dot.Cmd == ":many" { + if dot.Ret.IsStruct() { + lookup[dot.Ret.Type()] = dot + } + } + } + } + + return makeReaderString(lookup, v) +} + +func Offset(v int) int { + return v + 1 +} + +func ExecCommand(t TmplCtx, q Query) string { + + if t.Configuration.Async { + switch t.Settings.Engine { + case "postgresql": + switch q.Cmd { + case ":one": + return "executeRowAsync" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + case ":many": + return "executeAsync" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + default: + return "executeNonQueryAsync" + } + case "sqlite": + switch q.Cmd { + case ":one", ":many": + return "executeAsync" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + default: + return "executeNonQueryAsync" + } + } + } else { + switch t.Settings.Engine { + case "postgresql": + switch q.Cmd { + case ":one": + return "executeRow" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + case ":many": + return "execute" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + default: + return "executeNonQuery" + } + case "sqlite": + switch q.Cmd { + case ":one", ":many": + return "execute" + " " + sdk.LowerTitle(q.Ret.Type()) + "Reader" + default: + return "executeNonQuery" + } + + } + } + return "" +} + +func MakeConfig(req *plugin.Request) (Config, error) { + + var conf Config + if len(req.PluginOptions) > 0 { + if err := easyjson.Unmarshal(req.PluginOptions, &conf); err != nil { + return conf, 
err + } + } + return conf, nil +} + +func Format(s string) string { + // TODO: do more than just skip multiple blank lines, like maybe run fslint to format + skipNextSpace := false + var lines []string + for _, l := range strings.Split(s, "\n") { + isSpace := len(strings.TrimSpace(l)) == 0 + if !isSpace || !skipNextSpace { + lines = append(lines, l) + } + skipNextSpace = isSpace + } + o := strings.Join(lines, "\n") + o += "\n" + return o +} diff --git a/internal/core/imports.go b/internal/core/imports.go new file mode 100644 index 0000000..9155369 --- /dev/null +++ b/internal/core/imports.go @@ -0,0 +1,164 @@ +package core + +import ( + "fmt" + "strings" + + plugin "github.com/tabbed/sqlc-go/codegen" +) + +type Importer struct { + Settings *plugin.Settings + DataClasses []Struct + Enums []Enum + Queries []Query +} + +func (i *Importer) Imports(filename string, pkgName string) []string { + switch filename { + case "Models.fs": + return i.modelImports() + case "Readers.fs": + return i.readersImports(pkgName) + default: + return i.queryImports(pkgName) + } +} + +func (i *Importer) readersImports(pkgName string) []string { + uses := func(name string) bool { + for _, q := range i.Queries { + if !q.Ret.isEmpty() { + if strings.HasPrefix(q.Ret.Type(), name) { + return true + } + } + if !q.Arg.isEmpty() { + for _, f := range q.Arg.Struct.Fields { + if strings.HasPrefix(f.Type.Name, name) { + return true + } + } + } + } + return false + } + + std := stdImports(uses) + stds := make([]string, 0, len(std)) + stds = append(stds, std...) + + switch i.Settings.Engine { + case "postgresql": + stds = append(stds, "Npgsql") + stds = append(stds, "Npgsql.FSharp") + + case "sqlite": + stds = append(stds, "Fumble") + default: + + } + + return stds +} + +func (i *Importer) modelImports() []string { + uses := func(name string) bool { + for _, q := range i.Queries { + if !q.Ret.isEmpty() { + if q.Ret.Struct != nil { + for _, f := range q.Ret.Struct.Fields { + if f.Type.Name == name { + return true + } + } + } + if q.Ret.Type() == name { + return true + } + } + if !q.Arg.isEmpty() { + for _, f := range q.Arg.Struct.Fields { + if f.Type.Name == name { + return true + } + } + } + } + return false + } + + std := stdImports(uses) + + switch i.Settings.Engine { + case "postgresql": + std = append(std, "Npgsql") + + case "sqlite": + std = append(std, "Fumble") + default: + + } + + return std +} + +func stdImports(uses func(name string) bool) []string { + out := []string{"System"} + return out +} + +func (i *Importer) queryImports(pkgName string) []string { + + uses := func(name string) bool { + for _, q := range i.Queries { + if !q.Ret.isEmpty() { + if q.Ret.Struct != nil { + for _, f := range q.Ret.Struct.Fields { + if f.Type.Name == name { + return true + } + } + } + if q.Ret.Type() == name { + return true + } + } + if !q.Arg.isEmpty() { + for _, f := range q.Arg.Struct.Fields { + if f.Type.Name == name { + return true + } + } + } + } + return false + } + + std := stdImports(uses) + + stds := make([]string, 0, len(std)) + stds = append(stds, std...) + + switch i.Settings.Engine { + case "postgresql": + stds = append(stds, "Npgsql") + stds = append(stds, "Npgsql.FSharp") + + case "sqlite": + stds = append(stds, "Fumble") + default: + + } + + switch i.Settings.Engine { + case "mysql": + return stds + + default: + packageImports := []string{fmt.Sprintf("%s.Readers", pkgName)} + stds = append(stds, packageImports...) 
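+		// Non-MySQL engines open the generated Readers module from Queries.fs
+		// (e.g. "open Ondeck.Readers"); MySQL is skipped above because only
+		// models are generated for that engine.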
+ } + + return stds +} diff --git a/internal/core/maps.go b/internal/core/maps.go new file mode 100644 index 0000000..2e14ac6 --- /dev/null +++ b/internal/core/maps.go @@ -0,0 +1,22 @@ +package core + +import ( + "github.com/tabbed/sqlc-go/sdk" +) + +func DoubleSlashComment(f string) string { + return sdk.DoubleSlashComment(f) +} + +func LowerTitle(f string) string { + return sdk.LowerTitle(f) +} + +var DefaultImporter *Importer + +func Imports(filename string, pkgName string) []string { + if DefaultImporter == nil { + return nil + } + return DefaultImporter.Imports(filename, pkgName) +} diff --git a/internal/core/mysql_type.go b/internal/core/mysql_type.go new file mode 100644 index 0000000..34c889d --- /dev/null +++ b/internal/core/mysql_type.go @@ -0,0 +1,107 @@ +package core + +import ( + plugin "github.com/tabbed/sqlc-go/codegen" + "github.com/tabbed/sqlc-go/sdk" +) + +func mysqlType(req *plugin.CodeGenRequest, col *plugin.Column) (string, string, string, bool) { + columnType := sdk.DataType(col.Type) + + switch columnType { + + case "varchar", "text", "char", "tinytext", "mediumtext", "longtext": + if col.NotNull { + return "string", "string", "string", false + } else { + return "string option", "string", "string", false + + } + case "int", "integer", "smallint", "mediumint", "year": + if col.NotNull { + return "int", "int", "int", false + } else { + return "int option", "int", "int", false + + } + + case "bigint": + if col.NotNull { + return "int64", "int64", "int64", false + } else { + return "int64 option", "int64", "int64", false + + } + + case "blob", "binary", "varbinary", "tinyblob", "mediumblob", "longblob": + if col.NotNull { + return "byte[]", "byte[]", "byte[]", false + } else { + return "byte[] option", "byte[]", "byte[]", false + + } + + case "double", "double precision": + if col.NotNull { + return "double", "double", "double", false + } else { + return "double option", "double", "double", false + + } + + case "real": + if col.NotNull { + return "real", "real", "real", false + } else { + return "float option", "real", "real", false + + } + + case "decimal", "dec", "fixed": + if col.NotNull { + return "decimal", "decimal", "decimal", false + } else { + return "decimal option", "decimal", "decimal", false + + } + + case "date", "datetime", "time": + if col.NotNull { + return "DateTime", "DateTime", "DateTime", false + } else { + return "DateTime option", "DateTime", "DateTime", false + + } + + case "timestamp": + if col.NotNull { + return "DateTimeOffset", "DateTimeOffset", "DateTimeOffset", false + } else { + return "DateTimeOffset option", "DateTimeOffset", "DateTimeOffset", false + + } + + case "boolean", "bool", "tinyint": + if col.NotNull { + return "bool", "bool", "bool", false + } else { + return "bool option", "bool", "bool", false + + } + + case "json": + if col.NotNull { + return "string", "string", "string", false + } else { + return "string option", "string", "string", false + + } + + case "any": + return "obj", "obj", "obj", false + + default: + + return columnType, "unhandled_report_issue", "unhandled_report_issue", false + } +} diff --git a/internal/core/postgresql_type.go b/internal/core/postgresql_type.go new file mode 100644 index 0000000..4c29e76 --- /dev/null +++ b/internal/core/postgresql_type.go @@ -0,0 +1,174 @@ +package core + +import ( + "log" + "strings" + + plugin "github.com/tabbed/sqlc-go/codegen" + "github.com/tabbed/sqlc-go/sdk" +) + +// https://www.npgsql.org/doc/types/basic.html +// 
https://www.postgresql.org/docs/current/datatype-numeric.html +// returns f# type, reader type, library type, enum flag +func postgresType(req *plugin.CodeGenRequest, col *plugin.Column) (string, string, string, bool) { + columnType := strings.ToLower(sdk.DataType(col.Type)) + config, _ := MakeConfig(req) + + if config.TypeAffinity { + switch columnType { + case "serial", "serial4", "pg_catalog.serial4", "smallserial", "serial2", "pg_catalog.serial2", "integer", "int", "int4", "pg_catalog.int4", "smallint", "int2", "pg_catalog.int2": + if col.NotNull { + return "int", "int", "int", false + } else { + return "int option", "intOrNone", "intOrNone", false + } + case "float", "double", "double precision", "float8", "pg_catalog.float8", "real", "float4", "pg_catalog.float4": + + if col.NotNull { + return "double", "double", "double", false + } else { + return "double option", "doubleOrNone", "doubleOrNone", false + } + + } + } + + switch columnType { + case "serial", "serial4", "pg_catalog.serial4": + if col.NotNull { + return "int", "int", "int", false + } else { + return "int option", "intOrNone", "intOrNone", false + } + case "bigserial", "serial8", "pg_catalog.serial8": + if col.NotNull { + return "int64", "int64", "int64", false + } else { + return "int64 option", "int64OrNone", "int64OrNone", false + } + case "smallserial", "serial2", "pg_catalog.serial2": + if col.NotNull { + return "int16", "int16", "int16", false + } else { + return "int16 option", "int16OrNone", "int16OrNone", false + } + case "integer", "int", "int4", "pg_catalog.int4": + if col.NotNull { + return "int", "int", "int", false + } else { + return "int option", "intOrNone", "intOrNone", false + } + case "bigint", "int8", "pg_catalog.int8": + if col.NotNull { + return "int64", "int64", "int64", false + } else { + return "int64 option", "int64OrNone", "int64OrNone", false + } + case "smallint", "int2", "pg_catalog.int2": + if col.NotNull { + return "int16", "int16", "int16", false + } else { + return "int16 option", "int16OrNone", "int16OrNone", false + } + case "float", "double", "double precision", "float8", "pg_catalog.float8": + if col.NotNull { + return "double", "double", "double", false + } else { + return "double option", "doubleOrNone", "doubleOrNone", false + } + case "real", "float4", "pg_catalog.float4": + + if col.NotNull { + return "float32", "real", "real", false + } else { + return "float32 option", "realOrNone", "realOrNone", false + } + case "numeric", "money", "pg_catalog.numeric": + if col.NotNull { + return "decimal", "decimal", "decimal", false + } else { + return "decimal option", "decimalOrNone", "decimalOrNone", false + } + case "boolean", "bool", "pg_catalog.bool": + if col.NotNull { + return "bool", "bool", "bool", false + } else { + return "bool option", "boolOrNone", "boolOrNone", false + } + case "jsonb", "json": + if col.NotNull { + return "string", "string", "jsonb", false + } else { + return "string option", "stringOrNone", "jsonbOrNone", false + } + case "bytea", "blob", "pg_catalog.bytea": + if col.NotNull { + return "byte[]", "bytea", "bytea", false + } else { + return "byte[] option", "byteaOrNone", "byteaOrNone", false + } + case "date": + if col.NotNull { + return "DateOnly", "dateOnly", "date", false + } else { + return "DateOnly option", "dateOnlyOrNone", "dateOrNone", false + } + case "pg_catalog.time": + if col.NotNull { + return "TimeSpan", "interval", "interval", false + } else { + return "TimeSpan option", "intervalOrNone", "intervalOrNone", false + } + case 
"pg_catalog.timestamp": + if col.NotNull { + return "DateTime", "timestamp", "dateTime", false + } else { + return "DateTime option", "dateTimeOrNone", "timestampOrNone", false + } + case "pg_catalog.timestamptz", "timestamptz", "pg_catalog.timetz": + // TODO + if col.NotNull { + return "DateTimeOffset", "datetimeOffset", "timestamptz", false + } else { + return "DateTimeOffset option", "datetimeOffsetOrNone", "timestamptzOrNone", false + } + case "text": + if col.NotNull { + return "string", "text", "text", false + } else { + return "string option", "textOrNone", "textOrNone", false + } + + case "pg_catalog.varchar", "pg_catalog.bpchar", "string": + if col.NotNull { + return "string", "string", "string", false + } else { + return "string option", "stringOrNone", "stringOrNone", false + } + case "uuid": + if col.NotNull { + return "Guid", "uuid", "uuid", false + } else { + return "Guid option", "uuidOrNone", "uuidOrNone", false + } + + case "point": + if col.NotNull { + return "NpgsqlPoint", "point", "point", false + } else { + return "NpgsqlPoint option", "pointOrNone", "pointOrNone", false + } + + case "void", "null", "NULL": + // TODO + // A void value always returns NULL. Since there is no built-in NULL + // value into the SQL package, we'll use sql.NullBool + return "System.Nullable", "dbNull", "dbNull", false + + default: + // TODO Enums + log.Printf("unknown PostgreSQL type: %s\n", columnType) + return columnType, "unhandled_report_issue", "unhandled_report_issue", false + } +} diff --git a/internal/core/sqlite_type.go b/internal/core/sqlite_type.go new file mode 100644 index 0000000..ec09042 --- /dev/null +++ b/internal/core/sqlite_type.go @@ -0,0 +1,86 @@ +package core + +import ( + "strings" + + plugin "github.com/tabbed/sqlc-go/codegen" + "github.com/tabbed/sqlc-go/sdk" +) + +// https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/types +func sqliteType(req *plugin.CodeGenRequest, col *plugin.Column) (string, string, string, bool) { + + columnType := strings.ToLower(sdk.DataType(col.Type)) + notNull := col.NotNull || col.IsArray + + switch columnType { + + case "int", "integer", "tinyint", "smallint", "mediumint", "bigint", "unsignedbigint", "int2", "int8": + if notNull { + return "int", "int", "int", false + } else { + return "int option", "intOrNone", "intOrNone", false + } + case "blob": + if notNull { + return "byte[]", "bytes", "bytes", false + } else { + return "byte[] option", "bytesOrNone", "bytesOrNone", false + } + case "real", "double", "doubleprecision", "float": + if notNull { + return "double", "double", "double", false + } else { + return "double option", "doubleOrNone", "doubleOrNone", false + } + case "boolean", "bool": + if col.NotNull { + return "bool", "bool", "bool", false + } else { + return "bool option", "boolOrNone", "boolOrNone", false + } + + case "date", "datetime": + if notNull { + return "DateTime", "dateTime", "dateTime", false + } else { + return "DateTime option", "dateTimeOrNone", "dateTimeOrNone", false + } + case "timestamp": + if notNull { + return "DateTimeOffset", "dateTimeOffset", "dateTimeOffset", false + } else { + return "DateTimeOffset option", "dateTimeOffsetOrNone", "dateTimeOffsetOrNone", false + } + + } + + switch { + + case strings.HasPrefix(columnType, "character"), + strings.HasPrefix(columnType, "varchar"), + strings.HasPrefix(columnType, "varyingcharacter"), + strings.HasPrefix(columnType, "nchar"), + strings.HasPrefix(columnType, "nativecharacter"), + strings.HasPrefix(columnType, "nvarchar"), + columnType == "text", 
+ columnType == "clob": + if notNull { + return "string", "string", "string", false + } else { + return "string option", "stringOrNone", "stringOrNone", false + } + + case strings.HasPrefix(columnType, "decimal"), columnType == "numeric": + if notNull { + return "decimal", "decimal", "decimal", false + } else { + return "decimal option", "decimalOrNone", "decimalOrNone", false + } + + default: + return columnType, "unhandled_report_issue", "unhandled_report_issue", false + + } + +} diff --git a/internal/gen.go b/internal/gen.go new file mode 100644 index 0000000..155394b --- /dev/null +++ b/internal/gen.go @@ -0,0 +1,84 @@ +package fsharp + +import ( + "bufio" + "bytes" + "context" + "io" + "strings" + + plugin "github.com/tabbed/sqlc-go/codegen" + + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/kaashyapan/sqlc-gen-fsharp/internal/tmpl" +) + +func Generate(ctx context.Context, req *plugin.Request) (*plugin.Response, error) { + conf, err := core.MakeConfig(req) + + enums := core.BuildEnums(req) + structs := core.BuildDataClasses(conf, req) + queries, err := core.BuildQueries(req, structs) + if err != nil { + return nil, err + } + + i := &core.Importer{ + Settings: req.Settings, + Enums: enums, + DataClasses: structs, + Queries: queries, + } + + core.DefaultImporter = i + + tctx := core.TmplCtx{ + Settings: req.Settings, + Q: `"""`, + Package: conf.Package, + Queries: queries, + Enums: enums, + DataClasses: structs, + SqlcVersion: req.SqlcVersion, + Configuration: conf, + } + + output := map[string]string{} + + execute := func(name string, f func(io.Writer, core.TmplCtx) error) error { + var b bytes.Buffer + w := bufio.NewWriter(&b) + tctx.SourceName = name + err := f(w, tctx) + w.Flush() + if err != nil { + return err + } + if !strings.HasSuffix(name, ".fs") { + name += ".fs" + } + output[name] = core.Format(b.String()) + return nil + } + + if err := execute("Models.fs", tmpl.Models); err != nil { + return nil, err + } + if err := execute("Readers.fs", tmpl.Reader); err != nil { + return nil, err + } + if err := execute("Queries.fs", tmpl.SQL); err != nil { + return nil, err + } + + resp := plugin.CodeGenResponse{} + + for filename, code := range output { + resp.Files = append(resp.Files, &plugin.File{ + Name: filename, + Contents: []byte(code), + }) + } + + return &resp, nil +} diff --git a/internal/inflection/singular.go b/internal/inflection/singular.go new file mode 100644 index 0000000..518b5ed --- /dev/null +++ b/internal/inflection/singular.go @@ -0,0 +1,36 @@ +package inflection + +import ( + "strings" + + upstream "github.com/jinzhu/inflection" +) + +type SingularParams struct { + Name string + Exclusions []string +} + +func Singular(s SingularParams) string { + for _, exclusion := range s.Exclusions { + if strings.EqualFold(s.Name, exclusion) { + return s.Name + } + } + + // Manual fix for incorrect handling of "campus" + // + // https://github.com/kyleconroy/sqlc/issues/430 + // https://github.com/jinzhu/inflection/issues/13 + if strings.ToLower(s.Name) == "campus" { + return s.Name + } + // Manual fix for incorrect handling of "meta" + // + // https://github.com/kyleconroy/sqlc/issues/1217 + // https://github.com/jinzhu/inflection/issues/21 + if strings.ToLower(s.Name) == "meta" { + return s.Name + } + return upstream.Singular(s.Name) +} diff --git a/internal/templates/models.qtpl b/internal/templates/models.qtpl new file mode 100644 index 0000000..dc7f320 --- /dev/null +++ b/internal/templates/models.qtpl @@ -0,0 +1,37 @@ +{% import ( + 
"github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/tabbed/sqlc-go/sdk" +) +%} + +{% func Models(ctx core.TmplCtx) %} +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc {%s ctx.SqlcVersion %} + +namespace {%s ctx.Package %} + +{%- for _, mod := range core.Imports(ctx.SourceName, ctx.Package) -%} +open {%s mod %} +{%- endfor -%} + +{% for _, recordType := range ctx.DataClasses %} +type {%s recordType.Name %} = + { + {%- for _, field := range recordType.Fields -%} + {%s sdk.Title(field.Name) %} : {%s field.Type.Name %} + {%- endfor -%} + } +{% endfor %} + +{% for _, q := range ctx.Queries %} +{% if q.Ret.EmitStruct() %} +type {%s q.Ret.Type()%} = + { + {%- for _, f := range q.Ret.Struct.Fields -%} + {%s sdk.Title(f.Name) %} : {%s f.Type.String() %} + {%- endfor -%} + } +{% endif %} +{% endfor %} + +{% endfunc %} diff --git a/internal/templates/models.qtpl.go b/internal/templates/models.qtpl.go new file mode 100644 index 0000000..2545d02 --- /dev/null +++ b/internal/templates/models.qtpl.go @@ -0,0 +1,165 @@ +// Code generated by qtc from "models.qtpl". DO NOT EDIT. +// See https://github.com/valyala/quicktemplate for details. + +//line models.qtpl:1 +package templates + +//line models.qtpl:1 +import ( + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/tabbed/sqlc-go/sdk" +) + +//line models.qtpl:7 +import ( + qtio422016 "io" + + qt422016 "github.com/valyala/quicktemplate" +) + +//line models.qtpl:7 +var ( + _ = qtio422016.Copy + _ = qt422016.AcquireByteBuffer +) + +//line models.qtpl:7 +func StreamModels(qw422016 *qt422016.Writer, ctx core.TmplCtx) { +//line models.qtpl:7 + qw422016.N().S(` +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc `) +//line models.qtpl:9 + qw422016.E().S(ctx.SqlcVersion) +//line models.qtpl:9 + qw422016.N().S(` + +namespace `) +//line models.qtpl:11 + qw422016.E().S(ctx.Package) +//line models.qtpl:11 + qw422016.N().S(` + +`) +//line models.qtpl:13 + for _, mod := range core.Imports(ctx.SourceName, ctx.Package) { +//line models.qtpl:13 + qw422016.N().S(`open `) +//line models.qtpl:14 + qw422016.E().S(mod) +//line models.qtpl:14 + qw422016.N().S(` +`) +//line models.qtpl:15 + } +//line models.qtpl:15 + qw422016.N().S(` +`) +//line models.qtpl:17 + for _, recordType := range ctx.DataClasses { +//line models.qtpl:17 + qw422016.N().S(` +type `) +//line models.qtpl:18 + qw422016.E().S(recordType.Name) +//line models.qtpl:18 + qw422016.N().S(` = + { +`) +//line models.qtpl:20 + for _, field := range recordType.Fields { +//line models.qtpl:20 + qw422016.N().S(` `) +//line models.qtpl:21 + qw422016.E().S(sdk.Title(field.Name)) +//line models.qtpl:21 + qw422016.N().S(` : `) +//line models.qtpl:21 + qw422016.E().S(field.Type.Name) +//line models.qtpl:21 + qw422016.N().S(` +`) +//line models.qtpl:22 + } +//line models.qtpl:22 + qw422016.N().S(` } +`) +//line models.qtpl:24 + } +//line models.qtpl:24 + qw422016.N().S(` + +`) +//line models.qtpl:26 + for _, q := range ctx.Queries { +//line models.qtpl:26 + qw422016.N().S(` +`) +//line models.qtpl:27 + if q.Ret.EmitStruct() { +//line models.qtpl:27 + qw422016.N().S(` +type `) +//line models.qtpl:28 + qw422016.E().S(q.Ret.Type()) +//line models.qtpl:28 + qw422016.N().S(` = + { +`) +//line models.qtpl:30 + for _, f := range q.Ret.Struct.Fields { +//line models.qtpl:30 + qw422016.N().S(` `) +//line models.qtpl:31 + qw422016.E().S(sdk.Title(f.Name)) +//line models.qtpl:31 + qw422016.N().S(` : `) +//line models.qtpl:31 + qw422016.E().S(f.Type.String()) +//line models.qtpl:31 + 
qw422016.N().S(` +`) +//line models.qtpl:32 + } +//line models.qtpl:32 + qw422016.N().S(` } +`) +//line models.qtpl:34 + } +//line models.qtpl:34 + qw422016.N().S(` +`) +//line models.qtpl:35 + } +//line models.qtpl:35 + qw422016.N().S(` + +`) +//line models.qtpl:37 +} + +//line models.qtpl:37 +func WriteModels(qq422016 qtio422016.Writer, ctx core.TmplCtx) { +//line models.qtpl:37 + qw422016 := qt422016.AcquireWriter(qq422016) +//line models.qtpl:37 + StreamModels(qw422016, ctx) +//line models.qtpl:37 + qt422016.ReleaseWriter(qw422016) +//line models.qtpl:37 +} + +//line models.qtpl:37 +func Models(ctx core.TmplCtx) string { +//line models.qtpl:37 + qb422016 := qt422016.AcquireByteBuffer() +//line models.qtpl:37 + WriteModels(qb422016, ctx) +//line models.qtpl:37 + qs422016 := string(qb422016.B) +//line models.qtpl:37 + qt422016.ReleaseByteBuffer(qb422016) +//line models.qtpl:37 + return qs422016 +//line models.qtpl:37 +} diff --git a/internal/templates/queries.qtpl b/internal/templates/queries.qtpl new file mode 100644 index 0000000..6d3bbd9 --- /dev/null +++ b/internal/templates/queries.qtpl @@ -0,0 +1,55 @@ +{% import ( + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" +) +%} + +{% func Queries(ctx core.TmplCtx) %} +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc {%s ctx.SqlcVersion %} + +namespace {%s ctx.Package %} + +{%- for _, mod := range core.Imports(ctx.SourceName, ctx.Package) -%} +open {%s mod %} +{%- endfor -%} + +{% if ctx.Settings.Engine == "sqlite" %} +module Sql = Sqlite +type Sql = Sqlite +{% endif %} + +module Sqls = +{% for _, query := range ctx.Queries %} + [] + let {%s= query.ConstantName %} = + """ + {%s= query.SQL %} + """ +{% endfor %} +{%code + connstr := ctx.ConnString() +%} +[] +type DB (conn: string) = +{%- for _, line := range connstr -%} + {%s= line %} +{%- endfor -%} + + +{% for _, query := range ctx.Queries %} + {%code + pipelines := ctx.ConnPipeline(query) + %} + + {%- for _, line := range query.Comments -%} + ///{%s= line %} + {%- endfor -%} + member this.{%s query.ConstantName %} ({%s query.Arg.Args() %}) = + + {%- for _, line := range pipelines -%} + {%s= line %} + {%- endfor -%} + +{% endfor %} + +{% endfunc %} diff --git a/internal/templates/queries.qtpl.go b/internal/templates/queries.qtpl.go new file mode 100644 index 0000000..a0b432a --- /dev/null +++ b/internal/templates/queries.qtpl.go @@ -0,0 +1,199 @@ +// Code generated by qtc from "queries.qtpl". DO NOT EDIT. +// See https://github.com/valyala/quicktemplate for details. + +//line queries.qtpl:1 +package templates + +//line queries.qtpl:1 +import ( + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" +) + +//line queries.qtpl:6 +import ( + qtio422016 "io" + + qt422016 "github.com/valyala/quicktemplate" +) + +//line queries.qtpl:6 +var ( + _ = qtio422016.Copy + _ = qt422016.AcquireByteBuffer +) + +//line queries.qtpl:6 +func StreamQueries(qw422016 *qt422016.Writer, ctx core.TmplCtx) { +//line queries.qtpl:6 + qw422016.N().S(` +// Code generated by sqlc. DO NOT EDIT. 
+// version: sqlc `) +//line queries.qtpl:8 + qw422016.E().S(ctx.SqlcVersion) +//line queries.qtpl:8 + qw422016.N().S(` + +namespace `) +//line queries.qtpl:10 + qw422016.E().S(ctx.Package) +//line queries.qtpl:10 + qw422016.N().S(` + +`) +//line queries.qtpl:12 + for _, mod := range core.Imports(ctx.SourceName, ctx.Package) { +//line queries.qtpl:12 + qw422016.N().S(`open `) +//line queries.qtpl:13 + qw422016.E().S(mod) +//line queries.qtpl:13 + qw422016.N().S(` +`) +//line queries.qtpl:14 + } +//line queries.qtpl:14 + qw422016.N().S(` +`) +//line queries.qtpl:16 + if ctx.Settings.Engine == "sqlite" { +//line queries.qtpl:16 + qw422016.N().S(` +module Sql = Sqlite +type Sql = Sqlite +`) +//line queries.qtpl:19 + } +//line queries.qtpl:19 + qw422016.N().S(` + +module Sqls = +`) +//line queries.qtpl:22 + for _, query := range ctx.Queries { +//line queries.qtpl:22 + qw422016.N().S(` + [] + let `) +//line queries.qtpl:24 + qw422016.N().S(query.ConstantName) +//line queries.qtpl:24 + qw422016.N().S(` = + """ + `) +//line queries.qtpl:26 + qw422016.N().S(query.SQL) +//line queries.qtpl:26 + qw422016.N().S(` + """ +`) +//line queries.qtpl:28 + } +//line queries.qtpl:28 + qw422016.N().S(` +`) +//line queries.qtpl:30 + connstr := ctx.ConnString() + +//line queries.qtpl:31 + qw422016.N().S(` +[] +type DB (conn: string) = +`) +//line queries.qtpl:34 + for _, line := range connstr { +//line queries.qtpl:34 + qw422016.N().S(` `) +//line queries.qtpl:35 + qw422016.N().S(line) +//line queries.qtpl:35 + qw422016.N().S(` +`) +//line queries.qtpl:36 + } +//line queries.qtpl:36 + qw422016.N().S(` + +`) +//line queries.qtpl:39 + for _, query := range ctx.Queries { +//line queries.qtpl:39 + qw422016.N().S(` + `) +//line queries.qtpl:41 + pipelines := ctx.ConnPipeline(query) + +//line queries.qtpl:42 + qw422016.N().S(` + +`) +//line queries.qtpl:44 + for _, line := range query.Comments { +//line queries.qtpl:44 + qw422016.N().S(` ///`) +//line queries.qtpl:45 + qw422016.N().S(line) +//line queries.qtpl:45 + qw422016.N().S(` +`) +//line queries.qtpl:46 + } +//line queries.qtpl:46 + qw422016.N().S(` member this.`) +//line queries.qtpl:47 + qw422016.E().S(query.ConstantName) +//line queries.qtpl:47 + qw422016.N().S(` (`) +//line queries.qtpl:47 + qw422016.E().S(query.Arg.Args()) +//line queries.qtpl:47 + qw422016.N().S(`) = + +`) +//line queries.qtpl:49 + for _, line := range pipelines { +//line queries.qtpl:49 + qw422016.N().S(` `) +//line queries.qtpl:50 + qw422016.N().S(line) +//line queries.qtpl:50 + qw422016.N().S(` +`) +//line queries.qtpl:51 + } +//line queries.qtpl:51 + qw422016.N().S(` +`) +//line queries.qtpl:53 + } +//line queries.qtpl:53 + qw422016.N().S(` + +`) +//line queries.qtpl:55 +} + +//line queries.qtpl:55 +func WriteQueries(qq422016 qtio422016.Writer, ctx core.TmplCtx) { +//line queries.qtpl:55 + qw422016 := qt422016.AcquireWriter(qq422016) +//line queries.qtpl:55 + StreamQueries(qw422016, ctx) +//line queries.qtpl:55 + qt422016.ReleaseWriter(qw422016) +//line queries.qtpl:55 +} + +//line queries.qtpl:55 +func Queries(ctx core.TmplCtx) string { +//line queries.qtpl:55 + qb422016 := qt422016.AcquireByteBuffer() +//line queries.qtpl:55 + WriteQueries(qb422016, ctx) +//line queries.qtpl:55 + qs422016 := string(qb422016.B) +//line queries.qtpl:55 + qt422016.ReleaseByteBuffer(qb422016) +//line queries.qtpl:55 + return qs422016 +//line queries.qtpl:55 +} diff --git a/internal/templates/readers.qtpl b/internal/templates/readers.qtpl new file mode 100644 index 0000000..34ae193 --- /dev/null +++ 
b/internal/templates/readers.qtpl @@ -0,0 +1,25 @@ +{% import ( + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" +) +%} + +{% func Readers(ctx core.TmplCtx) %} +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc {%s ctx.SqlcVersion %} + +namespace {%s ctx.Package %} + +{%- for _, mod := range core.Imports(ctx.SourceName, ctx.Package) -%} +open {%s mod %} +{%- endfor -%} + +{% if ctx.Settings.Engine == "sqlite" %} +type RowReader = SqliteRowReader +{% endif %} + +module Readers = +{% for _, record := range ctx.ReaderSet() %} + {%s= record %} +{% endfor %} + +{% endfunc %} diff --git a/internal/templates/readers.qtpl.go b/internal/templates/readers.qtpl.go new file mode 100644 index 0000000..28f035d --- /dev/null +++ b/internal/templates/readers.qtpl.go @@ -0,0 +1,113 @@ +// Code generated by qtc from "readers.qtpl". DO NOT EDIT. +// See https://github.com/valyala/quicktemplate for details. + +//line readers.qtpl:1 +package templates + +//line readers.qtpl:1 +import ( + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" +) + +//line readers.qtpl:6 +import ( + qtio422016 "io" + + qt422016 "github.com/valyala/quicktemplate" +) + +//line readers.qtpl:6 +var ( + _ = qtio422016.Copy + _ = qt422016.AcquireByteBuffer +) + +//line readers.qtpl:6 +func StreamReaders(qw422016 *qt422016.Writer, ctx core.TmplCtx) { +//line readers.qtpl:6 + qw422016.N().S(` +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc `) +//line readers.qtpl:8 + qw422016.E().S(ctx.SqlcVersion) +//line readers.qtpl:8 + qw422016.N().S(` + +namespace `) +//line readers.qtpl:10 + qw422016.E().S(ctx.Package) +//line readers.qtpl:10 + qw422016.N().S(` + +`) +//line readers.qtpl:12 + for _, mod := range core.Imports(ctx.SourceName, ctx.Package) { +//line readers.qtpl:12 + qw422016.N().S(`open `) +//line readers.qtpl:13 + qw422016.E().S(mod) +//line readers.qtpl:13 + qw422016.N().S(` +`) +//line readers.qtpl:14 + } +//line readers.qtpl:14 + qw422016.N().S(` +`) +//line readers.qtpl:16 + if ctx.Settings.Engine == "sqlite" { +//line readers.qtpl:16 + qw422016.N().S(` +type RowReader = SqliteRowReader +`) +//line readers.qtpl:18 + } +//line readers.qtpl:18 + qw422016.N().S(` + +module Readers = +`) +//line readers.qtpl:21 + for _, record := range ctx.ReaderSet() { +//line readers.qtpl:21 + qw422016.N().S(` + `) +//line readers.qtpl:22 + qw422016.N().S(record) +//line readers.qtpl:22 + qw422016.N().S(` +`) +//line readers.qtpl:23 + } +//line readers.qtpl:23 + qw422016.N().S(` + +`) +//line readers.qtpl:25 +} + +//line readers.qtpl:25 +func WriteReaders(qq422016 qtio422016.Writer, ctx core.TmplCtx) { +//line readers.qtpl:25 + qw422016 := qt422016.AcquireWriter(qq422016) +//line readers.qtpl:25 + StreamReaders(qw422016, ctx) +//line readers.qtpl:25 + qt422016.ReleaseWriter(qw422016) +//line readers.qtpl:25 +} + +//line readers.qtpl:25 +func Readers(ctx core.TmplCtx) string { +//line readers.qtpl:25 + qb422016 := qt422016.AcquireByteBuffer() +//line readers.qtpl:25 + WriteReaders(qb422016, ctx) +//line readers.qtpl:25 + qs422016 := string(qb422016.B) +//line readers.qtpl:25 + qt422016.ReleaseByteBuffer(qb422016) +//line readers.qtpl:25 + return qs422016 +//line readers.qtpl:25 +} diff --git a/internal/tmpl/models.go b/internal/tmpl/models.go new file mode 100644 index 0000000..5365e35 --- /dev/null +++ b/internal/tmpl/models.go @@ -0,0 +1,140 @@ +package tmpl + +import ( + "fmt" + "io" + + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/kaashyapan/sqlc-gen-fsharp/internal/templates" + 
"github.com/tabbed/sqlc-go/sdk" +) + +func Models(w io.Writer, dot core.TmplCtx) (err error) { + defer func() { + if recovered := recover(); recovered != nil { + var ok bool + if err, ok = recovered.(error); !ok { + panic(recovered) + } + } + }() + + templates.WriteModels(w, dot) + return nil + + //return fsModelsTmpl(w, dot) +} + +func fsModelsTmpl(w io.Writer, dot core.TmplCtx) error { + + _, _ = io.WriteString(w, "// Code generated by sqlc. DO NOT EDIT.\n// versions:\n// sqlc ") + _, _ = io.WriteString(w, dot.SqlcVersion) + _, _ = io.WriteString(w, "\n\nmodule ") + _, _ = io.WriteString(w, dot.Package) + _, _ = io.WriteString(w, ".Models ") + _, _ = io.WriteString(w, "\n\n") + for _, dot := range core.Imports(dot.SourceName, dot.Package) { + _ = dot + _, _ = io.WriteString(w, "\n") + _ = dot + _, _ = io.WriteString(w, "open ") + _, _ = io.WriteString(w, dot) + + } + + _, _ = io.WriteString(w, "\n") + if eval := dot.Enums; len(eval) != 0 { + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n") + if eval := dot.Comment; len(eval) != 0 { + _, _ = io.WriteString(w, core.DoubleSlashComment(dot.Comment)) + } + _, _ = io.WriteString(w, "\nenum class ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, "(val value: String) {") + if eval := dot.Constants; len(eval) != 0 { + for _Vari, _Vare := range eval { + _ = _Vari + dot := _Vare + _ = dot + if eval := _Vari; eval != 0 { + _, _ = io.WriteString(w, ",") + } + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, "(\"") + _, _ = io.WriteString(w, dot.Value) + _, _ = io.WriteString(w, "\")") + } + } + _, _ = io.WriteString(w, ";\n\n companion object {\n private val map = ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, ".values().associateBy(") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, "::value)\n fun lookup(value: String) = map[value]\n }\n}\n") + } + } + _, _ = io.WriteString(w, "\n\n") + + if eval := dot.DataClasses; len(eval) != 0 { + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n") + if eval := dot.Comment; len(eval) != 0 { + _, _ = io.WriteString(w, core.DoubleSlashComment(dot.Comment)) + } + _, _ = io.WriteString(w, "\ntype ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, " = {") + if eval := dot.Fields; len(eval) != 0 { + for _Vari, _Vare := range eval { + _ = _Vari + dot := _Vare + _ = dot + + if eval := dot.Comment; len(eval) != 0 { + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, core.DoubleSlashComment(dot.Comment)) + } + + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, sdk.Title(dot.Name)) + _, _ = io.WriteString(w, ": ") + _, _ = fmt.Fprint(w, dot.Type) + } + } + _, _ = io.WriteString(w, "\n}\n") + } + } + + if eval := dot.Queries; len(eval) != 0 { + for _, dot := range eval { + _ = dot + + if dot.Ret.EmitStruct() { + _, _ = io.WriteString(w, "type ") + _, _ = io.WriteString(w, dot.Ret.Type()) + _, _ = io.WriteString(w, " = {") + if eval := dot.Ret.Struct.Fields; len(eval) != 0 { + for i, dot := range eval { + _ = dot + if i > 0 { + _, _ = io.WriteString(w, ",") + } + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, ": ") + _, _ = io.WriteString(w, dot.Type.String()) + } + } + _, _ = io.WriteString(w, " = }") + + _, _ = io.WriteString(w, "\n)\n\n") + } + } + } + + _, _ = io.WriteString(w, "\n\n") + return nil +} diff --git a/internal/tmpl/queries.go b/internal/tmpl/queries.go new file mode 100644 index 
0000000..413d430 --- /dev/null +++ b/internal/tmpl/queries.go @@ -0,0 +1,180 @@ +package tmpl + +import ( + "io" + + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/kaashyapan/sqlc-gen-fsharp/internal/templates" +) + +func SQL(w io.Writer, dot core.TmplCtx) (err error) { + defer func() { + if recovered := recover(); recovered != nil { + var ok bool + if err, ok = recovered.(error); !ok { + panic(recovered) + } + } + }() + + if dot.Settings.Engine == "mysql" { + return nil + } + + templates.WriteQueries(w, dot) + return nil + //return fsSQLTmpl(w, dot) +} + +func fsSQLTmpl(w io.Writer, dot core.TmplCtx) error { + ctx := dot + _, _ = io.WriteString(w, "// Code generated by sqlc. DO NOT EDIT.\n// versions:\n// sqlc ") + _, _ = io.WriteString(w, dot.SqlcVersion) + _, _ = io.WriteString(w, "\n\nmodule ") + _, _ = io.WriteString(w, dot.Package) + _, _ = io.WriteString(w, ".Queries ") + _, _ = io.WriteString(w, "\n\n") + + for _, dot := range core.Imports(dot.SourceName, dot.Package) { + _ = dot + _, _ = io.WriteString(w, "\n") + _ = dot + _, _ = io.WriteString(w, "open ") + _, _ = io.WriteString(w, dot) + + } + _, _ = io.WriteString(w, "\n\n") + + if eval := dot.Queries; len(eval) != 0 { + for _, dot := range eval { + _ = dot + if eval := dot.Comments; len(eval) != 0 { + _, _ = io.WriteString(w, "\n") + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n//") + _, _ = io.WriteString(w, dot) + } + } + _, _ = io.WriteString(w, "let [] ") + _, _ = io.WriteString(w, dot.ConstantName) + _, _ = io.WriteString(w, " = ") + _, _ = io.WriteString(w, ctx.Q) + _, _ = io.WriteString(w, "\n-- name: ") + _, _ = io.WriteString(w, dot.MethodName) + _, _ = io.WriteString(w, " ") + _, _ = io.WriteString(w, dot.Cmd) + _, _ = io.WriteString(w, "\n") + _, _ = io.WriteString(w, dot.SQL) + _, _ = io.WriteString(w, "\n") + _, _ = io.WriteString(w, ctx.Q) + _, _ = io.WriteString(w, "\n\n") + + if dot.Ret.EmitStruct() { + _, _ = io.WriteString(w, "data class ") + _, _ = io.WriteString(w, dot.Ret.Type()) + _, _ = io.WriteString(w, " (") + if eval := dot.Ret.Struct.Fields; len(eval) != 0 { + for i, dot := range eval { + _ = dot + if i > 0 { + _, _ = io.WriteString(w, ",") + } + _, _ = io.WriteString(w, "\n val ") + _, _ = io.WriteString(w, dot.Name) + _, _ = io.WriteString(w, ": ") + _, _ = io.WriteString(w, dot.Type.String()) + } + } + _, _ = io.WriteString(w, "\n)\n\n") + } + } + } + + if dot.Settings.Engine == "mysql" { + return nil + } + + if eval := dot.Queries; len(eval) != 0 { + _, _ = io.WriteString(w, "\ntype Queries(conn: string) = ") + _, _ = io.WriteString(w, "\n ") + + for _, dot := range eval { + _ = dot + if dot.Cmd == ":one" { + if eval := dot.Comments; len(eval) != 0 { + _, _ = io.WriteString(w, "\n") + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n//") + _, _ = io.WriteString(w, dot) + } + } + + _, _ = io.WriteString(w, "\n\n member this.") + _, _ = io.WriteString(w, dot.MethodName) + _, _ = io.WriteString(w, "(") + _, _ = io.WriteString(w, dot.Arg.Args()) + _, _ = io.WriteString(w, ") =") + + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, core.ExecCommand(ctx, dot)) + _, _ = io.WriteString(w, " ") + + } + if dot.Cmd == ":many" { + if eval := dot.Comments; len(eval) != 0 { + _, _ = io.WriteString(w, "\n") + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n//") + _, _ = io.WriteString(w, dot) + } + } + _, _ = io.WriteString(w, "\n\n member this.") + _, _ = io.WriteString(w, dot.MethodName) + _, _ = io.WriteString(w, "(") + 
_, _ = io.WriteString(w, dot.Arg.Args()) + _, _ = io.WriteString(w, ") =") + + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, core.ExecCommand(ctx, dot)) + _, _ = io.WriteString(w, " ") + + } + if dot.Cmd == ":exec" { + if eval := dot.Comments; len(eval) != 0 { + _, _ = io.WriteString(w, "\n") + for _, dot := range eval { + _ = dot + _, _ = io.WriteString(w, "\n//") + _, _ = io.WriteString(w, dot) + } + } + + _, _ = io.WriteString(w, "\n\n member this.") + _, _ = io.WriteString(w, dot.MethodName) + _, _ = io.WriteString(w, "(") + _, _ = io.WriteString(w, dot.Arg.Args()) + _, _ = io.WriteString(w, ") =") + + conn := ctx.ConnString() + for _, line := range conn { + _, _ = io.WriteString(w, line) + } + + pipelines := ctx.ConnPipeline(dot) + for _, line := range pipelines { + _, _ = io.WriteString(w, line) + } + + _, _ = io.WriteString(w, "\n ") + _, _ = io.WriteString(w, core.ExecCommand(ctx, dot)) + + } + + } + } + _, _ = io.WriteString(w, "\n\n") + return nil +} diff --git a/internal/tmpl/readers.go b/internal/tmpl/readers.go new file mode 100644 index 0000000..1d1b18a --- /dev/null +++ b/internal/tmpl/readers.go @@ -0,0 +1,60 @@ +package tmpl + +import ( + "io" + + "github.com/kaashyapan/sqlc-gen-fsharp/internal/core" + "github.com/kaashyapan/sqlc-gen-fsharp/internal/templates" +) + +func Reader(w io.Writer, dot core.TmplCtx) (err error) { + defer func() { + if recovered := recover(); recovered != nil { + var ok bool + if err, ok = recovered.(error); !ok { + panic(recovered) + } + } + }() + + if dot.Settings.Engine != "mysql" { + templates.WriteReaders(w, dot) + } + return nil + //return fsReaderTmpl(w, dot) +} + +func fsReaderTmpl(w io.Writer, dot core.TmplCtx) error { + if dot.Settings.Engine == "mysql" { + return nil + } + _, _ = io.WriteString(w, "// Code generated by sqlc. 
DO NOT EDIT.\n// versions:\n// sqlc ") + _, _ = io.WriteString(w, dot.SqlcVersion) + _, _ = io.WriteString(w, dot.Package) + _, _ = io.WriteString(w, "\n\nmodule ") + _, _ = io.WriteString(w, dot.Package) + _, _ = io.WriteString(w, ".Readers ") + + _, _ = io.WriteString(w, "\n") + for _, dot := range core.Imports(dot.SourceName, dot.Package) { + _ = dot + _, _ = io.WriteString(w, "\n") + _ = dot + _, _ = io.WriteString(w, "open ") + _, _ = io.WriteString(w, dot) + + } + _, _ = io.WriteString(w, "\n\n") + + readerCnt := len(dot.ReaderSet()) + if readerCnt > 0 { + for _, dot := range dot.ReaderSet() { + _, _ = io.WriteString(w, "\n") + _, _ = io.WriteString(w, dot) + _, _ = io.WriteString(w, "\n") + + } + } + + return nil +} diff --git a/plugin/main.go b/plugin/main.go new file mode 100644 index 0000000..9281161 --- /dev/null +++ b/plugin/main.go @@ -0,0 +1,11 @@ +package main + +import ( + "github.com/tabbed/sqlc-go/codegen" + + fsharp "github.com/kaashyapan/sqlc-gen-fsharp/internal" +) + +func main() { + codegen.Run(fsharp.Generate) +} diff --git a/test/.editorconfig b/test/.editorconfig new file mode 100644 index 0000000..6f3f230 --- /dev/null +++ b/test/.editorconfig @@ -0,0 +1,17 @@ +[*.{fs,fsx}] +fsharp_multiline_bracket_style = aligned +max_line_length=120 +indent_size=2 +fsharp_max_function_binding_width=80 +fsharp_max_array_or_list_width=100 +fsharp_array_or_list_multiline_formatter=character_width +fsharp_max_if_then_else_short_width=120 +fsharp_max_record_width=100 +fsharp_record_multiline_formatter=character_width +fsharp_max_if_then_short_width=100 +fsharp_multiline_block_brackets_on_same_column=true +fsharp_multi_line_lambda_closing_newline=true +fsharp_keep_max_number_of_blank_lines=1 +fsharp_max_dot_get_expression_width=120 +fsharp_max_function_binding_width=120 +fsharp_max_value_binding_width=120 diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 0000000..2c7bdd3 --- /dev/null +++ b/test/.gitignore @@ -0,0 +1,3 @@ +bin/ +obj/ +.ionide/ \ No newline at end of file diff --git a/test/Readme.md b/test/Readme.md new file mode 100644 index 0000000..598fb19 --- /dev/null +++ b/test/Readme.md @@ -0,0 +1,19 @@ +To test + +```bash +cd postgres +sqlc generate +cd ../sqlite +sqlc generate +cd ../mysql +sqlc generate +cd ../ +fantomas . 
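+# start the "pg" Postgres container defined in docker-compose.yml (user sqlc, password example) before dotnet run; test/src/Postgres.fs connects to it on localhost:5432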
+docker-compose up -d +dotnet run +``` + +To build wasm release from directory root +``` +docker run -it --rm -w /src -v ./sqlc-gen-fsharp:/src tinygo/tinygo:0.27.0 tinygo build -o sqlc-gen-fsharp.wasm -target wasi plugin/main.go +``` diff --git a/test/SqlcTest.fsproj b/test/SqlcTest.fsproj new file mode 100644 index 0000000..e65abe2 --- /dev/null +++ b/test/SqlcTest.fsproj @@ -0,0 +1,24 @@ + + + + net7.0 + SqlcTest + false + + + + + + + + + + + + + + + + + + diff --git a/test/docker-compose.yml b/test/docker-compose.yml new file mode 100644 index 0000000..1c799a9 --- /dev/null +++ b/test/docker-compose.yml @@ -0,0 +1,9 @@ +version: "3.1" + +services: + pg: + image: postgres + network_mode: "host" + environment: + POSTGRES_PASSWORD: example + POSTGRES_USER: sqlc diff --git a/test/mysql/query.sql b/test/mysql/query.sql new file mode 100644 index 0000000..ae227e9 --- /dev/null +++ b/test/mysql/query.sql @@ -0,0 +1,18 @@ +-- name: GetEmployee :one +SELECT * FROM employees +WHERE id = @id LIMIT 1; + +-- name: ListEmployees :many +SELECT * FROM employees +ORDER BY name; + +-- name: CreateEmployee :exec +INSERT INTO employees ( + name, occupation, age +) VALUES ( + @name, @occupation, @age +); + +-- name: DeleteEmployee :exec +DELETE FROM employees +WHERE id = @id; diff --git a/test/mysql/schema.sql b/test/mysql/schema.sql new file mode 100644 index 0000000..6e63d4d --- /dev/null +++ b/test/mysql/schema.sql @@ -0,0 +1,7 @@ +CREATE TABLE employees( + id int NOT NULL AUTO_INCREMENT, + name varchar(45) NOT NULL, + occupation varchar(35) NOT NULL, + age int NOT NULL, + PRIMARY KEY (id) +); \ No newline at end of file diff --git a/test/mysql/sqlc.json b/test/mysql/sqlc.json new file mode 100644 index 0000000..c415905 --- /dev/null +++ b/test/mysql/sqlc.json @@ -0,0 +1,27 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "engine": "mysql", + "schema": "schema.sql", + "queries": "query.sql", + "codegen": [ + { + "out": "../src/mysql", + "plugin": "fsharp", + "options": { + "namespace": "MAuthors" + } + } + ] + } + ] +} \ No newline at end of file diff --git a/test/postgres/query.sql b/test/postgres/query.sql new file mode 100644 index 0000000..7dfd23e --- /dev/null +++ b/test/postgres/query.sql @@ -0,0 +1,76 @@ +-- name: GetAuthor :one +SELECT * FROM authors +WHERE id = @id LIMIT 1; + +-- name: ListAuthors :many +SELECT first_name, last_name, dead FROM authors +ORDER BY first_name; + +-- name: CreateAuthor :one +INSERT INTO authors ( + first_name, + ssid, + middle_name, + last_name, + avatar , + dead , + disabled , + address , + country , + spouses , + children , + grandchildren , + bio, + savings_acct , + loan_acct , + deposit_acct , + book_count , + date_of_birth , + t_1 , + t_2 , + ts_1 , + ts_2 , + passport_id , + metadata , + metadatab, + col_fl, + col_real , + col_dbl , + col_fl8 +) VALUES ( + @first_name, + @ssid, + @middle_name, + @last_name, + @avatar , + @dead , + @disabled , + @address , + @country , + @spouses , + @children , + @grandchildren , + @bio, + @savings_acct , + @loan_acct , + @deposit_acct , + @book_count , + @date_of_birth , + @t_1 , + @t_2 , + @ts_1 , + @ts_2 , + @passport_id, + @metadata , + @metadatab , + @col_fl , + @col_real , + @col_dbl , + @col_fl8 + +) +RETURNING *; + +-- name: DeleteAuthor :exec +DELETE FROM authors +WHERE id = @id; diff --git a/test/postgres/schema.sql b/test/postgres/schema.sql new file mode 100644 index 0000000..159a815 --- /dev/null +++ 
b/test/postgres/schema.sql @@ -0,0 +1,35 @@ +-- Example queries for sqlc +DROP TABLE IF EXISTS authors; + +CREATE TABLE authors ( + id BIGSERIAL PRIMARY KEY, + ssid bigint, + first_name CHARACTER VARYING(255) NOT NULL, + middle_name character, + last_name character(20), + avatar bytea, + dead bool, + disabled boolean, + address varchar, + country varchar(20), + spouses int, + children int4, + grandchildren int2, + bio text, + savings_acct money, + loan_acct decimal, + deposit_acct numeric(10,5), + book_count INT NOT NULL, + date_of_birth date, + t_1 time with time zone, + t_2 time without time zone, + ts_1 timestamp with time zone, + ts_2 timestamp without time zone, + passport_id uuid, + metadata json, + metadatab jsonb, + col_fl float4, + col_real real, + col_dbl double precision, + col_fl8 float8 +); diff --git a/test/postgres/sqlc.json b/test/postgres/sqlc.json new file mode 100644 index 0000000..91b655d --- /dev/null +++ b/test/postgres/sqlc.json @@ -0,0 +1,29 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "engine": "postgresql", + "schema": "schema.sql", + "queries": "query.sql", + "codegen": [ + { + "out": "../src/pg", + "plugin": "fsharp", + "options": { + "namespace": "PAuthors", + "async": true, + "type_affinity": true + } + } + ] + } + ] +} diff --git a/test/sqlite/query.sql b/test/sqlite/query.sql new file mode 100644 index 0000000..1e0e077 --- /dev/null +++ b/test/sqlite/query.sql @@ -0,0 +1,24 @@ + +-- name: GetAuthor2 :one +SELECT id, name, bio FROM authors +WHERE id = @id LIMIT 1; + +-- name: GetAuthor :one +SELECT * FROM authors +WHERE id = @id LIMIT 1; + +-- name: ListAuthors :many +SELECT * FROM authors +ORDER BY name; + +-- name: CreateAuthor :one +INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +) +RETURNING *; + +-- name: DeleteAuthor :exec +DELETE FROM authors +WHERE id = @id; \ No newline at end of file diff --git a/test/sqlite/schema.sql b/test/sqlite/schema.sql new file mode 100644 index 0000000..810d7fe --- /dev/null +++ b/test/sqlite/schema.sql @@ -0,0 +1,14 @@ +-- Example queries for sqlc +CREATE TABLE authors ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + bio TEXT, + address TEXT, + date_of_birth DATE, + last_ts TIMESTAMP, + savings_amt REAL, + loan_amt NUMERIC, + disabled BOOLEAN, + married BOOL, + payable DECIMAL +); diff --git a/test/sqlite/sqlc.json b/test/sqlite/sqlc.json new file mode 100644 index 0000000..8282dca --- /dev/null +++ b/test/sqlite/sqlc.json @@ -0,0 +1,27 @@ +{ + "version": "2", + "plugins": [ + { + "name": "fsharp", + "process": { + "cmd": "/home/ubuntu/bin/sqlc-gen-fsharp" + } + } + ], + "sql": [ + { + "engine": "sqlite", + "schema": "schema.sql", + "queries": "query.sql", + "codegen": [ + { + "out": "../src/sqlite", + "plugin": "fsharp", + "options": { + "namespace": "SAuthors" + } + } + ] + } + ] +} \ No newline at end of file diff --git a/test/src/Postgres.fs b/test/src/Postgres.fs new file mode 100644 index 0000000..a988050 --- /dev/null +++ b/test/src/Postgres.fs @@ -0,0 +1,86 @@ +module Postgres + +open System +open System.Text +open Npgsql +open Npgsql.FSharp +open PAuthors +open FSharp.Data.LiteralProviders + +module Async = + let map f workflow = + task { + let! 
res = workflow + return f res + } + +[] +let initsql = TextFile<"postgres/schema.sql">.Text + +[] +let conn = "Server=localhost;Port=5432;Database=postgres;User Id=sqlc;Password=example;" + +let initiate () = + let c = conn |> Sql.connect |> Sql.createConnection + c.Open() + let cmd = new NpgsqlCommand(initsql, c) + printfn "%A" <| cmd.ExecuteNonQuery() + +let run () = + let db = PAuthors.DB(conn) + + printfn "\n-------------------------------------------------------------------- \n" + printfn "Initiating postgres DB" + + ignore <| initiate () + + task { + do! db.listAuthors () |> Async.map (printfn "List authors - %A") + + do! db.createAuthor ("Jeff Bezos", 5) |> Async.map (printfn "Create authors - %A") + + do! + db.createAuthor ( + firstName = "Elon", + bookCount = 2, + ssid = int64 868, + middleName = "E", + lastName = "musk", + dead = false, + avatar = Encoding.UTF8.GetBytes("avatar"), + disabled = false, + address = "California", + country = "USA", + spouses = 2, + bio = "Twitter CEO", + savingsAcct = decimal 434.23, + children = 4, + grandchildren = 4, //int16 4, + loanAcct = decimal 234.57, + depositAcct = decimal 89.0, + dateOfBirth = DateOnly.FromDateTime(DateTime.Today), + t2 = TimeSpan.FromTicks(34567), + t1 = DateTimeOffset.UtcNow, + ts1 = DateTimeOffset.UtcNow, + ts2 = DateTime.Now, + passportId = Guid.NewGuid(), + metadata = """{"key" : "value"}""", + metadatab = """{"key" : "value"}""", + colFl = 3.1475 , // float32 3.1475, + colReal = 3.1475, //float32 3.1475, + colDbl = double 3.1475, + colFl8 = double 3.1475 + ) + + |> Async.map (printfn "Create authors - %A") + + //do! db.listAuthors () |> Async.map (printfn "List authors - %A") + let! author = db.getAuthor 1 + printfn "Get authors - %A" author + do! db.deleteAuthor (author.Id) |> Async.map (printfn "Delete authors - %A") + + } + |> Async.AwaitTask + |> Async.RunSynchronously + + () diff --git a/test/src/Program.fs b/test/src/Program.fs new file mode 100644 index 0000000..5443426 --- /dev/null +++ b/test/src/Program.fs @@ -0,0 +1,10 @@ +module SqlcTest + +open System + +[] +let main args = + + Postgres.run () + Sqlite.run () + 0 diff --git a/test/src/Sqlite.fs b/test/src/Sqlite.fs new file mode 100644 index 0000000..29e6231 --- /dev/null +++ b/test/src/Sqlite.fs @@ -0,0 +1,48 @@ +module Sqlite + +open System +open Fumble +open FSharp.Data.LiteralProviders +open SAuthors +open System.IO + +[] +let initsql = TextFile<"sqlite/schema.sql">.Text + +[] +let conn = "Data Source=/tmp/sample.db;" + +let initiate () = + conn + |> Sqlite.connect + |> Sqlite.query initsql + |> Sqlite.executeNonQuery + |> printfn "%A" + +let run () = + let db = SAuthors.DB(conn) + + printfn "\n----------------------------------------------------------------- \n" + printfn "Initiating Sqlite DB" + + ignore <| initiate () + + db.listAuthors () |> printfn "List authors - %A" + + db.createAuthor ("Elon Musk", "CEO, CTO") |> printfn "Create authors - %A" + + db.createAuthor ("Jeff Bezos", "Chairman Amazon") + |> function + | Ok rows -> + let r = List.head rows + db.deleteAuthor (r.Id) |> printfn "Delete authors - %A" + + | Error e -> raise e + + db.listAuthors () |> printfn "List authors - %A" + + db.listAuthors () |> printfn "List authors - %A" + db.getAuthor (1) |> printfn "Get authors - %A" + + File.Delete("/tmp/sample.db") + () diff --git a/test/src/mysql/Models.fs b/test/src/mysql/Models.fs new file mode 100644 index 0000000..9c6af3c --- /dev/null +++ b/test/src/mysql/Models.fs @@ -0,0 +1,8 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// version: sqlc v1.18.0 + +namespace MAuthors + +open System + +type Employee = { Id: int; Name: string; Occupation: string; Age: int } diff --git a/test/src/mysql/Queries.fs b/test/src/mysql/Queries.fs new file mode 100644 index 0000000..e69de29 diff --git a/test/src/mysql/Readers.fs b/test/src/mysql/Readers.fs new file mode 100644 index 0000000..e69de29 diff --git a/test/src/pg/Models.fs b/test/src/pg/Models.fs new file mode 100644 index 0000000..e3003f1 --- /dev/null +++ b/test/src/pg/Models.fs @@ -0,0 +1,43 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace PAuthors + +open System +open Npgsql + +type Author = + { + Id: int64 + Ssid: int64 option + FirstName: string + MiddleName: string option + LastName: string option + Avatar: byte[] option + Dead: bool option + Disabled: bool option + Address: string option + Country: string option + Spouses: int option + Children: int option + Grandchildren: int option + Bio: string option + SavingsAcct: decimal option + LoanAcct: decimal option + DepositAcct: decimal option + BookCount: int + DateOfBirth: DateOnly option + T1: DateTimeOffset option + T2: TimeSpan option + Ts1: DateTimeOffset option + Ts2: DateTime option + PassportId: Guid option + Metadata: string option + Metadatab: string option + ColFl: double option + ColReal: double option + ColDbl: double option + ColFl8: double option + } + +type ListAuthorsRow = { FirstName: string; LastName: string option; Dead: bool option } diff --git a/test/src/pg/Queries.fs b/test/src/pg/Queries.fs new file mode 100644 index 0000000..5bbd55e --- /dev/null +++ b/test/src/pg/Queries.fs @@ -0,0 +1,205 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace PAuthors + +open System +open Npgsql +open Npgsql.FSharp +open PAuthors.Readers + +module Sqls = + + [] + let createAuthor = + """ + INSERT INTO authors ( + first_name, + ssid, + middle_name, + last_name, + avatar , + dead , + disabled , + address , + country , + spouses , + children , + grandchildren , + bio, + savings_acct , + loan_acct , + deposit_acct , + book_count , + date_of_birth , + t_1 , + t_2 , + ts_1 , + ts_2 , + passport_id , + metadata , + metadatab, + col_fl, + col_real , + col_dbl , + col_fl8 +) VALUES ( + @first_name, + @ssid, + @middle_name, + @last_name, + @avatar , + @dead , + @disabled , + @address , + @country , + @spouses , + @children , + @grandchildren , + @bio, + @savings_acct , + @loan_acct , + @deposit_acct , + @book_count , + @date_of_birth , + @t_1 , + @t_2 , + @ts_1 , + @ts_2 , + @passport_id, + @metadata , + @metadatab , + @col_fl , + @col_real , + @col_dbl , + @col_fl8 + +) +RETURNING id, ssid, first_name, middle_name, last_name, avatar, dead, disabled, address, country, spouses, children, grandchildren, bio, savings_acct, loan_acct, deposit_acct, book_count, date_of_birth, t_1, t_2, ts_1, ts_2, passport_id, metadata, metadatab, col_fl, col_real, col_dbl, col_fl8 + """ + + [] + let deleteAuthor = + """ + DELETE FROM authors +WHERE id = @id + """ + + [] + let getAuthor = + """ + SELECT id, ssid, first_name, middle_name, last_name, avatar, dead, disabled, address, country, spouses, children, grandchildren, bio, savings_acct, loan_acct, deposit_acct, book_count, date_of_birth, t_1, t_2, ts_1, ts_2, passport_id, metadata, metadatab, col_fl, col_real, col_dbl, col_fl8 FROM authors +WHERE id = @id LIMIT 1 + """ + + [] + let listAuthors = + """ + SELECT first_name, last_name, dead FROM authors +ORDER BY first_name + """ + +[] +type DB(conn: string) = + + // 
https://www.connectionstrings.com/npgsql + + member this.createAuthor + ( + firstName: string, + bookCount: int, + ?ssid: int64, + ?middleName: string, + ?lastName: string, + ?avatar: byte[], + ?dead: bool, + ?disabled: bool, + ?address: string, + ?country: string, + ?spouses: int, + ?children: int, + ?grandchildren: int, + ?bio: string, + ?savingsAcct: decimal, + ?loanAcct: decimal, + ?depositAcct: decimal, + ?dateOfBirth: DateOnly, + ?t1: DateTimeOffset, + ?t2: TimeSpan, + ?ts1: DateTimeOffset, + ?ts2: DateTime, + ?passportId: Guid, + ?metadata: string, + ?metadatab: string, + ?colFl: double, + ?colReal: double, + ?colDbl: double, + ?colFl8: double + ) = + + let parameters = + [ + ("first_name", Sql.string firstName) + ("ssid", Sql.int64OrNone ssid) + ("middle_name", Sql.stringOrNone middleName) + ("last_name", Sql.stringOrNone lastName) + ("avatar", Sql.byteaOrNone avatar) + ("dead", Sql.boolOrNone dead) + ("disabled", Sql.boolOrNone disabled) + ("address", Sql.stringOrNone address) + ("country", Sql.stringOrNone country) + ("spouses", Sql.intOrNone spouses) + ("children", Sql.intOrNone children) + ("grandchildren", Sql.intOrNone grandchildren) + ("bio", Sql.textOrNone bio) + ("savings_acct", Sql.decimalOrNone savingsAcct) + ("loan_acct", Sql.decimalOrNone loanAcct) + ("deposit_acct", Sql.decimalOrNone depositAcct) + ("book_count", Sql.int bookCount) + ("date_of_birth", Sql.dateOrNone dateOfBirth) + ("t_1", Sql.timestamptzOrNone t1) + ("t_2", Sql.intervalOrNone t2) + ("ts_1", Sql.timestamptzOrNone ts1) + ("ts_2", Sql.timestampOrNone ts2) + ("passport_id", Sql.uuidOrNone passportId) + ("metadata", Sql.jsonbOrNone metadata) + ("metadatab", Sql.jsonbOrNone metadatab) + ("col_fl", Sql.doubleOrNone colFl) + ("col_real", Sql.doubleOrNone colReal) + ("col_dbl", Sql.doubleOrNone colDbl) + ("col_fl8", Sql.doubleOrNone colFl8) + ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.executeRowAsync authorReader + + member this.deleteAuthor(id: int64) = + + let parameters = [ ("id", Sql.int64 id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteAuthor + |> Sql.parameters parameters + |> Sql.executeNonQueryAsync + + member this.getAuthor(id: int64) = + + let parameters = [ ("id", Sql.int64 id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.executeRowAsync authorReader + + /// This Sql will list all authors + member this.listAuthors() = + + conn + |> Sql.connect + |> Sql.query Sqls.listAuthors + |> Sql.executeAsync listAuthorsRowReader diff --git a/test/src/pg/Readers.fs b/test/src/pg/Readers.fs new file mode 100644 index 0000000..84b8155 --- /dev/null +++ b/test/src/pg/Readers.fs @@ -0,0 +1,51 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// version: sqlc v1.18.0 + +namespace PAuthors + +open System +open Npgsql +open Npgsql.FSharp + +module Readers = + + let authorReader (r: RowReader) : Author = + { + Author.Id = r.int64 "id" + Ssid = r.int64OrNone "ssid" + FirstName = r.string "first_name" + MiddleName = r.stringOrNone "middle_name" + LastName = r.stringOrNone "last_name" + Avatar = r.byteaOrNone "avatar" + Dead = r.boolOrNone "dead" + Disabled = r.boolOrNone "disabled" + Address = r.stringOrNone "address" + Country = r.stringOrNone "country" + Spouses = r.intOrNone "spouses" + Children = r.intOrNone "children" + Grandchildren = r.intOrNone "grandchildren" + Bio = r.textOrNone "bio" + SavingsAcct = r.decimalOrNone "savings_acct" + LoanAcct = r.decimalOrNone "loan_acct" + DepositAcct = r.decimalOrNone "deposit_acct" + BookCount = r.int "book_count" + DateOfBirth = r.dateOnlyOrNone "date_of_birth" + T1 = r.datetimeOffsetOrNone "t_1" + T2 = r.intervalOrNone "t_2" + Ts1 = r.datetimeOffsetOrNone "ts_1" + Ts2 = r.dateTimeOrNone "ts_2" + PassportId = r.uuidOrNone "passport_id" + Metadata = r.stringOrNone "metadata" + Metadatab = r.stringOrNone "metadatab" + ColFl = r.doubleOrNone "col_fl" + ColReal = r.doubleOrNone "col_real" + ColDbl = r.doubleOrNone "col_dbl" + ColFl8 = r.doubleOrNone "col_fl8" + } + + let listAuthorsRowReader (r: RowReader) : ListAuthorsRow = + { + ListAuthorsRow.FirstName = r.string "first_name" + LastName = r.stringOrNone "last_name" + Dead = r.boolOrNone "dead" + } diff --git a/test/src/sqlite/Models.fs b/test/src/sqlite/Models.fs new file mode 100644 index 0000000..cd69707 --- /dev/null +++ b/test/src/sqlite/Models.fs @@ -0,0 +1,24 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace SAuthors + +open System +open Fumble + +type Author = + { + Id: int + Name: string + Bio: string option + Address: string option + DateOfBirth: DateTime option + LastTs: DateTimeOffset option + SavingsAmt: double option + LoanAmt: decimal option + Disabled: bool option + Married: bool option + Payable: decimal option + } + +type GetAuthor2Row = { Id: int; Name: string; Bio: string option } diff --git a/test/src/sqlite/Queries.fs b/test/src/sqlite/Queries.fs new file mode 100644 index 0000000..d2f70e5 --- /dev/null +++ b/test/src/sqlite/Queries.fs @@ -0,0 +1,101 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// version: sqlc v1.18.0 + +namespace SAuthors + +open System +open Fumble +open SAuthors.Readers + +module Sql = Sqlite +type Sql = Sqlite + +module Sqls = + + [] + let createAuthor = + """ + INSERT INTO authors ( + name, bio +) VALUES ( + @name, @bio +) +RETURNING id, name, bio, address, date_of_birth, last_ts, savings_amt, loan_amt, disabled, married, payable + """ + + [] + let deleteAuthor = + """ + DELETE FROM authors +WHERE id = @id + """ + + [] + let getAuthor = + """ + SELECT id, name, bio, address, date_of_birth, last_ts, savings_amt, loan_amt, disabled, married, payable FROM authors +WHERE id = @id LIMIT 1 + """ + + [] + let getAuthor2 = + """ + SELECT id, name, bio FROM authors +WHERE id = @id LIMIT 1 + """ + + [] + let listAuthors = + """ + SELECT id, name, bio, address, date_of_birth, last_ts, savings_amt, loan_amt, disabled, married, payable FROM authors +ORDER BY name + """ + +[] +type DB(conn: string) = + + // https://www.connectionstrings.com/sqlite-net-provider + + member this.createAuthor(name: string, ?bio: string) = + + let parameters = [ ("name", Sql.string name); ("bio", Sql.stringOrNone bio) ] + + conn + |> Sql.connect + |> Sql.query Sqls.createAuthor + |> Sql.parameters parameters + |> Sql.execute authorReader + + member this.deleteAuthor(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.deleteAuthor + |> Sql.parameters parameters + |> Sql.executeNonQuery + + member this.getAuthor(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor + |> Sql.parameters parameters + |> Sql.execute authorReader + + member this.getAuthor2(id: int) = + + let parameters = [ ("id", Sql.int id) ] + + conn + |> Sql.connect + |> Sql.query Sqls.getAuthor2 + |> Sql.parameters parameters + |> Sql.execute getAuthor2RowReader + + member this.listAuthors() = + + conn |> Sql.connect |> Sql.query Sqls.listAuthors |> Sql.execute authorReader diff --git a/test/src/sqlite/Readers.fs b/test/src/sqlite/Readers.fs new file mode 100644 index 0000000..508260f --- /dev/null +++ b/test/src/sqlite/Readers.fs @@ -0,0 +1,29 @@ +// Code generated by sqlc. DO NOT EDIT. +// version: sqlc v1.18.0 + +namespace SAuthors + +open System +open Fumble + +type RowReader = SqliteRowReader + +module Readers = + + let authorReader (r: RowReader) : Author = + { + Author.Id = r.int "id" + Name = r.string "name" + Bio = r.stringOrNone "bio" + Address = r.stringOrNone "address" + DateOfBirth = r.dateTimeOrNone "date_of_birth" + LastTs = r.dateTimeOffsetOrNone "last_ts" + SavingsAmt = r.doubleOrNone "savings_amt" + LoanAmt = r.decimalOrNone "loan_amt" + Disabled = r.boolOrNone "disabled" + Married = r.boolOrNone "married" + Payable = r.decimalOrNone "payable" + } + + let getAuthor2RowReader (r: RowReader) : GetAuthor2Row = + { GetAuthor2Row.Id = r.int "id"; Name = r.string "name"; Bio = r.stringOrNone "bio" }