diff --git a/adl/adl.work.json b/adl/adl.work.json index f3029d70..2f0d0186 100644 --- a/adl/adl.work.json +++ b/adl/adl.work.json @@ -1,4 +1,55 @@ { "adlc": "0.0.0", - "use": [] + "defaultGenOptions": [ + { + "tsgen": { + "referenceable": "remote", + "outputs": { + "outputDir": "", + "manifest": null + }, + "includeRuntime": false, + "runtimeDir": null, + "generate_transitive": false, + "include_resolver": false, + "modules": "all", + "capitalize_branch_names_in_types": true, + "capitalize_type_names": true + } + } + ], + "use": [ + { + "path": "./adlc_dev" + }, + { + "path": "./tests/test31/lib" + }, + { + "path": "./stdlib/adlc" + }, + { + "path": "./stdlib/sys" + }, + { + "path": "./tests/test31/proj", + "genOptions": [ + { + "tsgen": { + "outputs": { + "outputDir": "../generated/typescript/tests/test31/src", + "manifest": "../generated/typescript/tests/test31/.adl-manifest" + }, + "includeRuntime": true, + "runtimeDir": "./runtime", + "generate_transitive": true, + "include_resolver": true, + "modules": "all", + "capitalize_branch_names_in_types": true, + "capitalize_type_names": true + } + } + ] + } + ] } \ No newline at end of file diff --git a/adl/adlc_dev/adl.pkg.json b/adl/adlc_dev/adl.pkg.json index c8c8b201..818e73d2 100644 --- a/adl/adlc_dev/adl.pkg.json +++ b/adl/adlc_dev/adl.pkg.json @@ -1,4 +1,4 @@ { - "pkg": { "path" : "" }, + "path" : "github.com/adl-lang/adl/adl/adlc_dev", "adlc": "0.0.0" } \ No newline at end of file diff --git a/adl/stdlib/adl.pkg.json b/adl/stdlib/adl.pkg.json deleted file mode 100644 index c8c8b201..00000000 --- a/adl/stdlib/adl.pkg.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "pkg": { "path" : "" }, - "adlc": "0.0.0" -} \ No newline at end of file diff --git a/adl/stdlib/adlc/adl.pkg.json b/adl/stdlib/adlc/adl.pkg.json new file mode 100644 index 00000000..5fee7ae7 --- /dev/null +++ b/adl/stdlib/adlc/adl.pkg.json @@ -0,0 +1,5 @@ +{ + "path" : "github.com/adl-lang/adl/adl/stdlib/adlc", + "globalAlias": "adlc", + "adlc": "0.0.0" +} 
\ No newline at end of file diff --git a/adl/stdlib/adlc/packaging.adl b/adl/stdlib/adlc/packaging.adl index d0520489..5469ba56 100644 --- a/adl/stdlib/adlc/packaging.adl +++ b/adl/stdlib/adlc/packaging.adl @@ -3,19 +3,28 @@ module adlc.packaging { +import sys.types.Pair; + +type AdlWorkspace0 = AdlWorkspace; +type AdlWorkspace1 = AdlWorkspace>; + /// Expected to live in a file named `adl.work.json` -struct AdlWorkspace { +struct AdlWorkspace { /// Version String adlc; - AdlPackageRefs use; + Vector defaultGenOptions = []; + Vector use; + // AdlPackageRefs use; }; +// key must be a path to a directory directly under the folder containing the `adl.work.json` file. +// type AdlPackageRefs = StringMap; + type AdlPackageRefs = Vector; struct AdlPackageRef { - /// must be a path to a directory directly under the folder containing the `adl.work.json` file. - String root; - Vector genOptions; + String path; + Vector genOptions = []; }; union GenOptions { @@ -68,7 +77,8 @@ union ReferenceableScopeOption { /// Expected to live in a file named `adl.pkg.json` struct AdlPackage { - PackageDirective pkg; + String path; + Nullable globalAlias = null; /// Version String adlc; Vector requires = []; diff --git a/adl/stdlib/sys/adl.pkg.json b/adl/stdlib/sys/adl.pkg.json new file mode 100644 index 00000000..ab85eda3 --- /dev/null +++ b/adl/stdlib/sys/adl.pkg.json @@ -0,0 +1,5 @@ +{ + "path" : "github.com/adl-lang/adl/adl/stdlib/sys", + "globalAlias": "sys", + "adlc": "0.0.0" +} \ No newline at end of file diff --git a/adl/tests/test31/lib/adl.pkg.json b/adl/tests/test31/lib/adl.pkg.json new file mode 100644 index 00000000..d78ec64d --- /dev/null +++ b/adl/tests/test31/lib/adl.pkg.json @@ -0,0 +1,5 @@ +{ + "path" : "github.com/adl-lang/adl/adl/tests/test31/lib", + "globalAlias": "common", + "adlc": "0.0.0" +} \ No newline at end of file diff --git a/adl/tests/test31/lib/common.adl b/adl/tests/test31/lib/common.adl new file mode 100644 index 00000000..d8c48a78 --- /dev/null +++ 
b/adl/tests/test31/lib/common.adl @@ -0,0 +1,94 @@ +module common { + +import common.strings.StringNE; +import common.db.DbColumnType; + +/// A instant in time, represented as milliseconds from +/// the epoch of "1970-01-01T00:00:00Z +newtype Instant = Int64; + +/// A date in ISO8601 format +newtype LocalDate = String = "1970-01-01"; + +/// A time in ISO8601 format +newtype LocalTime = String = "00:00:00"; + +/// A datetime in ISO8601 format +newtype LocalDateTime = String = "1970-01-01T00:00:00"; + +/// The day of the week +union DayOfWeek { + Void monday; + Void tuesday; + Void wednesday; + Void thursday; + Void friday; + Void saturday; + Void sunday; +}; + +/// A duration in ISO8601 format +@DbColumnType "interval" +newtype Duration = String = "P1D"; + +/// An IANA timezone +newtype Timezone = StringNE; + +/// A holder for paginated results +struct Paginated { + + /// The paginated items + Vector items; + + /// The offset used for this query + Int64 current_offset; + + /// The size of the entire date set + Int64 total_size; +}; + +/// Empty Struct (Used mostly for Void RPC responses) +struct Unit {}; + +/// Phantom type to capture a StringMap with a named string key type: +type StringKeyMap = StringMap; + +/// Naming aid for strings used as keys +type Key = String; + +/// A value of type T along with the Key +struct WithKey { + Key key; + T value; +}; + +/// Postgres array of strings type that is serialized in to a list of Strings +@DbColumnType "text[]" +newtype StringList = Vector; + +// Postgres tsvector type for plain text searching +@DbColumnType "tsvector" +newtype TSVector = String; + +/// Postgres Geography type that is serialized using GeoJson +// Maps to postgres database as 'geography' column +// See https://postgis.net/workshops/postgis-intro/geography.html#casting-to-geometry +@DbColumnType "geography" +newtype GeographyGeoJson = String; + +/// Postgres Geometry type +// Maps to postgres database as 'geometry' column +// Serialise to/from WKT (Well 
Known Text) in the string +@DbColumnType "geometry" +newtype GeometryWKT = String; + + +/// A floating point decimal value +// +// In principle, the json format allows numbers with arbitrary decimal precision. However +// support for this is dependent on individual client libraries, and crucially the +// JSON.parse() function in browsers converts json numbers to a binary floating point +// double representation. Hence we take the safe option and serialize as JSON strings. +@DbColumnType "numeric" +newtype BigDecimal = String; +}; diff --git a/adl/tests/test31/lib/common.adl-java b/adl/tests/test31/lib/common.adl-java new file mode 100644 index 00000000..4b922fc2 --- /dev/null +++ b/adl/tests/test31/lib/common.adl-java @@ -0,0 +1,82 @@ +module common +{ +import adlc.config.java.*; +import common.db.JavaDbCustomType; +import sys.types.Set; + +annotation JavaPackage "au.com.helixta.adl.common"; + +annotation Instant JavaCustomType { + "javaname" : "java.time.Instant", + "helpers" : "au.com.helixta.adl.custom.InstantHelpers" +}; + +annotation LocalDate JavaCustomType { + "javaname" : "java.time.LocalDate", + "helpers" : "au.com.helixta.adl.custom.LocalDateHelpers" +}; + +annotation LocalTime JavaCustomType { + "javaname" : "java.time.LocalTime", + "helpers" : "au.com.helixta.adl.custom.LocalTimeHelpers" +}; + +annotation LocalDateTime JavaCustomType { + "javaname" : "java.time.LocalDateTime", + "helpers" : "au.com.helixta.adl.custom.LocalDateTimeHelpers" +}; + +annotation DayOfWeek JavaCustomType { + "javaname" : "java.time.DayOfWeek", + "helpers" : "au.com.helixta.adl.custom.DayOfWeekHelpers" +}; + +annotation GeographyGeoJson JavaCustomType { + "javaname" : "org.postgis.PGgeometry", + "helpers": "au.com.helixta.adl.custom.PGgeometryHelpers" +}; + +annotation GeographyGeoJson JavaDbCustomType { + "javaDbType": "org.postgis.PGgeometry", + "helpers": "au.com.helixta.adl.custom.PGgeometryHelpers" +}; + +annotation GeometryWKT JavaCustomType { + "javaname" : 
"org.postgis.PGgeometry", + "helpers": "au.com.helixta.adl.custom.PGgeometryWKTHelpers" +}; + +annotation GeometryWKT JavaDbCustomType { + "javaDbType" : "org.postgis.PGgeometry", + "helpers": "au.com.helixta.adl.custom.PGgeometryWKTHelpers" +}; + +annotation TSVector JavaDbCustomType { + "javaDbType" : "String", + "helpers": "au.com.helixta.adl.custom.TSVectorHelpers" +}; + +annotation StringList JavaDbCustomType { + "javaDbType" : "au.com.helixta.nofrills.sql.StringList", + "helpers": "au.com.helixta.adl.custom.StringListHelpers" +}; + +annotation StringList JavaCustomType { + "javaname" : "au.com.helixta.nofrills.sql.StringList", + "helpers": "au.com.helixta.adl.custom.StringListHelpers" +}; + +annotation BigDecimal JavaCustomType { + "javaname" : "java.math.BigDecimal", + "helpers": "au.com.helixta.adl.custom.BigDecimalHelpers" +}; + +annotation BigDecimal JavaDbCustomType { + "javaDbType" : "java.math.BigDecimal", + "helpers": "au.com.helixta.adl.custom.BigDecimalHelpers" +}; + +/// Force adlc java runtime to include HashSetHelpers.java +newtype SetStrings = Set; + +}; diff --git a/adl/tests/test31/lib/common/adminui/api.adl b/adl/tests/test31/lib/common/adminui/api.adl new file mode 100644 index 00000000..a9217501 --- /dev/null +++ b/adl/tests/test31/lib/common/adminui/api.adl @@ -0,0 +1,168 @@ +module common.adminui.api { + +import sys.adlast.TypeExpr; +import sys.adlast.ModuleName; +import sys.adlast.ScopedName; +import sys.types.Maybe; +import common.Unit; +import common.db.DbKey; +import common.db.WithDbId; +import common.Paginated; +import common.tabular.TableQuery; +import common.http.HttpPost; +import common.adminui.db.MetaAdlDecl; + +import common.flyway.api.FlywayReq; +import common.flyway.api.FlywayResp; + +// An adl value serialised via the standard schema +type AdlValue = Json; + +// A Row in a table is a map of Adl values +type DbRow = StringMap; +type DbId = DbKey; + +/// Endpoints supporting CRUD for the Admin UI, +/// along with metadata 
access. +struct AdminApiRequests { + + /// Query the tables available for admin access + HttpPost> queryTables = { + "path" : "/admin/meta/tables", + "security" : {"tokenWithRole": "admin"} + }; + + /// Query the ADL declarations used in the column + /// type declarations + HttpPost> queryDecls = { + "path" : "/admin/meta/decls", + "security" : {"tokenWithRole": "admin"} + }; + + /// Query rows from a table, specifying pagination, + /// filtering and sorting. As per it's return type, this method + /// can only be used for tables with a string primary key + /// called `id`. + HttpPost>> query = { + "path" : "/admin/query", + "security" : {"tokenWithRole": "admin"} + }; + + /// Query rows from a table without requiring a primary key, specifying pagination, + /// filtering and sorting. + HttpPost> queryWithoutIds = { + "path" : "/admin/query-without-ids", + "security" : {"tokenWithRole": "admin"} + }; + + /// Create a new row in a table + HttpPost> create = { + "path" : "/admin/create", + "security" : {"tokenWithRole": "admin"} + }; + + /// Update a single existing row in a table + HttpPost> update = { + "path" : "/admin/update", + "security" : {"tokenWithRole": "admin"} + }; + + /// Delete a single existing row in a table + HttpPost> delete = { + "path" : "/admin/delete", + "security" : {"tokenWithRole": "admin"} + }; + + /// Fetch descriptive strings for db keys + HttpPost,Vector> dbKeyLabels = { + "path" : "/admin/dbkeylabels", + "security" : {"tokenWithRole": "admin"} + }; + + /// Flyway migration handled by adminui + HttpPost flyway = { + "path" : "/admin/flyway", + "security" : {"tokenWithRole": "admin"} + }; +}; + +struct QueryReq { + String table; + + /// The columns to be included in the result. If empty, all + /// columns will be included. 
+ Vector columns; + + TableQuery query; +}; + +struct Table { + String name; + String label; + String description; + Bool hasIdPrimaryKey; + + /// If true, the current user is allowed to query existingRows + Bool allowQuery; + + /// If true, the current user is allowed to insert new rows + Bool allowInsert; + + /// If true, the current user is allowed to delete rows + Bool allowDelete; + + /// If true, the current user is allowed to update rows + Bool allowUpdate; + + Vector columns; + ModuleName declModuleName; + String declName; +}; + +union Access { + Void readOnly; + Void readWrite; +}; + +struct TableColumn { + String name; + String label; + String description; + TypeExpr typeExpr; + Maybe defaultValue; + Bool generated; + Access access; +}; + +struct CreateReq { + String table; + DbRow values; +}; + +struct UpdateReq { + String table; + WithDbId values; +}; + +struct DeleteReq { + String table; + DbId id; +}; + +union DbResult { + T ok; + String dbError; +}; + +struct DbKeyLabelReq { + ScopedName scopedName; + String id; +}; + +struct DbKeyLabelResp { + ScopedName scopedName; + String id; + String label; +}; + +}; diff --git a/adl/tests/test31/lib/common/adminui/api.adl-java b/adl/tests/test31/lib/common/adminui/api.adl-java new file mode 100644 index 00000000..106a2ff7 --- /dev/null +++ b/adl/tests/test31/lib/common/adminui/api.adl-java @@ -0,0 +1,6 @@ +module common.adminui.api +{ +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.adminui.api"; +}; diff --git a/adl/tests/test31/lib/common/adminui/config.adl b/adl/tests/test31/lib/common/adminui/config.adl new file mode 100644 index 00000000..dfc90669 --- /dev/null +++ b/adl/tests/test31/lib/common/adminui/config.adl @@ -0,0 +1,35 @@ +module common.adminui.config { + +import common.config.frontend.ThreadPoolConfig; +import common.config.log.LogConfig; +import common.config.log.ClientLogConfig; +import common.config.db.PostgreSqlConfig; + +/// Configuration for the adminui server 
+struct ServerConfig { + /// Port to listen for requests + Int16 port; + + /// Environment name, e.g. dev, uat, prod + String environment; + + /// Name of the release / code version + String releaseName; + + /// Secret used to sign JWT tokens + String jwtSecret; + + /// Database config + PostgreSqlConfig db; + + /// Server-side logging config + LogConfig logging; + + /// Frontend threadpool configuration + ThreadPoolConfig threadPool; + + // DB Migration config, if null flyway is not available. + Nullable flyway = null; +}; + +}; diff --git a/adl/tests/test31/lib/common/adminui/db.adl b/adl/tests/test31/lib/common/adminui/db.adl new file mode 100644 index 00000000..27132a7e --- /dev/null +++ b/adl/tests/test31/lib/common/adminui/db.adl @@ -0,0 +1,40 @@ +module common.adminui.db { + +import sys.adlast.Decl; +import sys.adlast.ModuleName; +import common.db.DbTable; +import common.db.DbColumnName; +import common.Instant; + +struct MetaTable { + String name; + String description; + ModuleName declModuleName; + String declName; +}; + +annotation MetaTable DbTable { + "withPrimaryKey": [ + "name" + ], + "uniquenessConstraints": [ + ["name"] + ] +}; + +struct MetaAdlDecl { + ModuleName moduleName; + String name; + Decl decl; +}; + +annotation MetaAdlDecl DbTable { + "withPrimaryKey": [ + "moduleName", "name" + ], + "uniquenessConstraints": [ + ["moduleName", "name"] + ] +}; + +}; diff --git a/adl/tests/test31/lib/common/adminui/db.adl-java b/adl/tests/test31/lib/common/adminui/db.adl-java new file mode 100644 index 00000000..472873e2 --- /dev/null +++ b/adl/tests/test31/lib/common/adminui/db.adl-java @@ -0,0 +1,6 @@ +module common.adminui.db +{ +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.adminui.db"; +}; diff --git a/adl/tests/test31/lib/common/config/aws.adl b/adl/tests/test31/lib/common/config/aws.adl new file mode 100644 index 00000000..075a970e --- /dev/null +++ b/adl/tests/test31/lib/common/config/aws.adl @@ -0,0 +1,46 @@ +module 
common.config.aws +{ + +struct AwsCredentials { + String accessKey; + String secretKey; +}; + +/// The methods available to obtain AWS credentials +union AwsCredentialsProvider { + Void useEnvVariables; + Void useInstanceProfile; + AwsCredentials value; +}; + +/// The methods available to determine the AWS region +union AwsRegionProvider { + Void fromInstance; + String value; +}; + +struct S3Location { + /// The name on of an S3 bucket + String s3Bucket; + + /// The S3 path prefix for the storage + String s3Prefix; +}; + +/// Description of an AWS SQS queue +struct QueueDetails { + /// The amount of time to keep the connection open while waiting for queue messages. + Int32 waitTimeSeconds = 10; + String queueUrl = ""; +}; + + +/// Configuration of an S3 client +struct S3ClientConfig { + S3Location location; + AwsRegionProvider region; + AwsCredentialsProvider credentials; +}; + + +}; diff --git a/adl/tests/test31/lib/common/config/aws.adl-java b/adl/tests/test31/lib/common/config/aws.adl-java new file mode 100644 index 00000000..0ffa32cf --- /dev/null +++ b/adl/tests/test31/lib/common/config/aws.adl-java @@ -0,0 +1,7 @@ +module common.config.aws +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.aws"; +}; diff --git a/adl/tests/test31/lib/common/config/azure.adl b/adl/tests/test31/lib/common/config/azure.adl new file mode 100644 index 00000000..8d07e641 --- /dev/null +++ b/adl/tests/test31/lib/common/config/azure.adl @@ -0,0 +1,15 @@ +module common.config.azure { + + /// Methods available to obtain Azure credentials + union AzureCredentialsProvider { + /// A shared access signature string + String sharedAccessSignature; + }; + + /// Config parameters for an Azure Blob Client + struct AzureBlobClientConfig { + String blobstoreUri; + AzureCredentialsProvider credentials; + String container; + }; +}; diff --git a/adl/tests/test31/lib/common/config/db.adl b/adl/tests/test31/lib/common/config/db.adl new file mode 100644 index 
00000000..34a5adf6 --- /dev/null +++ b/adl/tests/test31/lib/common/config/db.adl @@ -0,0 +1,67 @@ +module common.config.db +{ + +// Connection details for a postgres server +struct PostgreSqlConfig +{ + String host; + Int32 port; + + // the database name + String dbname; + + // login parameters + String user; + String password; + + // The size of the connection pool + Int32 poolSize = 5; + + // true to enable the postgis extensions + Bool postgisEnabled = false; + + /// The amount of time to wait before refreshing a connection + Int32 minRefreshDelayMillis = 60000; + + /// The amount of time to wait for a free connection before timing out + Int32 connectionWaitMillis = 5000; + + /// The root cert to be used for ssl connections + Nullable sslRootCert = null; + + /// The ssl connection mode + PostgresqlSslMode sslMode = "prefer"; +}; + +union PostgresqlSslMode { + Void disable; + Void allow; + Void prefer; + Void require; + + @SerializedName "verify-ca" + Void verifyCa; + + @SerializedName "verify-full" + Void verifyFull; +}; + + +// Connection details for a microsoft SQL server +struct SqlServerConfig +{ + String host; + Int32 port; + + // the database name + String dbname; + + // login parameters + String user; + String password; + + // The size of the connection pool + Int32 poolSize = 5; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/db.adl-java b/adl/tests/test31/lib/common/config/db.adl-java new file mode 100644 index 00000000..ad45d24e --- /dev/null +++ b/adl/tests/test31/lib/common/config/db.adl-java @@ -0,0 +1,7 @@ +module common.config.db +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.db"; +}; diff --git a/adl/tests/test31/lib/common/config/emailer.adl b/adl/tests/test31/lib/common/config/emailer.adl new file mode 100644 index 00000000..f662a8d8 --- /dev/null +++ b/adl/tests/test31/lib/common/config/emailer.adl @@ -0,0 +1,22 @@ +module common.config.emailer +{ +import 
common.config.aws.AwsCredentialsProvider; +import common.config.aws.AwsRegionProvider; + +/// Configuration for the emailer used +union EmailerConfig { + /// Fake emailer, just prints outgoing email info to stdout + Void fake; + + /// SES emailer implementation + SesConfig ses; + +}; + +/// Configuration for AWS SES emailer implementation +struct SesConfig { + AwsCredentialsProvider credentials; + AwsRegionProvider region; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/emailer.adl-java b/adl/tests/test31/lib/common/config/emailer.adl-java new file mode 100644 index 00000000..0982d4ce --- /dev/null +++ b/adl/tests/test31/lib/common/config/emailer.adl-java @@ -0,0 +1,7 @@ +module common.config.emailer +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.emailer"; +}; diff --git a/adl/tests/test31/lib/common/config/frontend.adl b/adl/tests/test31/lib/common/config/frontend.adl new file mode 100644 index 00000000..f194854f --- /dev/null +++ b/adl/tests/test31/lib/common/config/frontend.adl @@ -0,0 +1,22 @@ +module common.config.frontend { + +/// Configuration for a consumer which processes work from a queue with a bounded thread pool. +/// +/// e.g. Useful for configuring jetty's request thread pool. 
+/// +// TODO: Java custom type to map this to au.com.helixta.util.concurrent.ThreadPoolConfig +struct ThreadPoolConfig { + /// Maximum number of threads that can be spawned to handle requests + Int32 maxThreads; + + /// Minimum number of threads ready for jetty to handle requests + Int32 minThreads; + + /// How long a thread can be idle before it is candidate to be stopped + Int32 idleTimeoutMillis; + + /// Maximum number of pending requests before new ones are dropped + Int32 queueSize; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/frontend.adl-java b/adl/tests/test31/lib/common/config/frontend.adl-java new file mode 100644 index 00000000..1e7586a7 --- /dev/null +++ b/adl/tests/test31/lib/common/config/frontend.adl-java @@ -0,0 +1,8 @@ +module common.config.frontend +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.frontend"; +}; + diff --git a/adl/tests/test31/lib/common/config/google.adl b/adl/tests/test31/lib/common/config/google.adl new file mode 100644 index 00000000..6aeb3bf4 --- /dev/null +++ b/adl/tests/test31/lib/common/config/google.adl @@ -0,0 +1,12 @@ +module common.config.google +{ + +/// Connection parameters for google services +struct GoogleConfig { + String clientId; + String clientSecret; + String accessToken; + String refreshToken; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/google.adl-java b/adl/tests/test31/lib/common/config/google.adl-java new file mode 100644 index 00000000..b134c62c --- /dev/null +++ b/adl/tests/test31/lib/common/config/google.adl-java @@ -0,0 +1,8 @@ +module common.config.google +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.google"; +}; + diff --git a/adl/tests/test31/lib/common/config/jwt.adl b/adl/tests/test31/lib/common/config/jwt.adl new file mode 100644 index 00000000..86ba18b6 --- /dev/null +++ b/adl/tests/test31/lib/common/config/jwt.adl @@ -0,0 +1,10 @@ +module common.config.jwt +{ + +/// Secret and 
expiry of the JWT token +struct JwtConfig { + String secret; + Double expiryInMinutes; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/jwt.adl-java b/adl/tests/test31/lib/common/config/jwt.adl-java new file mode 100644 index 00000000..c9ece9b4 --- /dev/null +++ b/adl/tests/test31/lib/common/config/jwt.adl-java @@ -0,0 +1,7 @@ +module common.config.jwt +{ + import adlc.config.java.*; + + annotation JavaPackage "au.com.helixta.adl.common.config.jwt"; + +}; diff --git a/adl/tests/test31/lib/common/config/log.adl b/adl/tests/test31/lib/common/config/log.adl new file mode 100644 index 00000000..f91dbc62 --- /dev/null +++ b/adl/tests/test31/lib/common/config/log.adl @@ -0,0 +1,69 @@ +module common.config.log +{ + +import common.config.aws.AwsCredentialsProvider; +import common.config.aws.AwsRegionProvider; + +// Configuration for logging +union LogLevel { + Void DEBUG; + Void INFO; + Void WARN; + Void ERROR; + Void NONE; + Void TRACE; +}; + +struct StdoutConfig { + LogLevel level; +}; + +struct FluentdConfig { + LogLevel level; + String hostname; + Int32 port; + String tag; +}; + +/// Server side Rollbar config +/// NOTE: Name is `ServerRollbarConfig` due to being named prior to adding +/// `ClientRollbarConfig` +struct RollbarConfig { + LogLevel level; + String serverToken; + String env = ""; +}; + +/// Server side logging, i.e. reporting errors originating from the server +/// NOTE: Name is not `ServerLogConfig` due to being named prior to adding +/// `ClientLogConfig` +struct LogConfig { + Nullable stdout = {"level" : "INFO"}; + Nullable fluentd = null; + Nullable rollbar = null; + Nullable cloudwatch = null; +}; + +/// Client side Rollbar config, i.e. from Rollbar JS +struct ClientRollbarConfig { + String accessToken; + String env = ""; +}; + +/// Client side logging, i.e. 
reporting errors originating from the client +struct ClientLogConfig { + Nullable rollbar = null; +}; + +/// Logging of metrics to cloudwatch +struct CloudWatchMetricLogConfig { + AwsCredentialsProvider credentials; + AwsRegionProvider region; + String env; + String namespace; + /// A whitelist of metrics that should be sent to CloudWatch. Users should be + /// aware that making this list too big can quickly cause costs to increase + Vector metricsToSend; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/log.adl-java b/adl/tests/test31/lib/common/config/log.adl-java new file mode 100644 index 00000000..3daf592d --- /dev/null +++ b/adl/tests/test31/lib/common/config/log.adl-java @@ -0,0 +1,12 @@ +module common.config.log +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.log"; + +annotation LogLevel JavaCustomType { + "javaname" : "au.com.helixta.log.Log.Level", + "helpers" : "au.com.helixta.adl.custom.LogLevelHelpers" +}; +}; diff --git a/adl/tests/test31/lib/common/config/okta.adl b/adl/tests/test31/lib/common/config/okta.adl new file mode 100644 index 00000000..92c073d4 --- /dev/null +++ b/adl/tests/test31/lib/common/config/okta.adl @@ -0,0 +1,32 @@ +module common.config.okta +{ +import sys.types.Maybe; + +/// Config parameters for setting up open ID authentication using Okta +struct OktaConfig { + + // NOTE(Barry): The following parameters are sufficient to use for the + // majority of our applications; however, the Okta implementation was written + // for Sydney Airport which has many other features that are bespoke to that + // application. The Java binding for this ADL type ignores all of the extra + // config parameters that are not necessary (i.e. sets them to empty + // strings). 
+ + /// ID of the client application + String clientId; + /// Secret of the client application + String clientSecret; + /// Hostname of the okta instance + String clientHost; + /// Endpoint which users should be redirected to to complete authorization + String authorizeEndpoint; + /// The URL supplied to okta to redirect the user to after authorization is + /// complete + String redirectUrl; + /// API key used for performing actions in okta, not required if the app does + /// not use this functionality + Maybe apiKey; +}; + + +}; diff --git a/adl/tests/test31/lib/common/config/okta.adl-java b/adl/tests/test31/lib/common/config/okta.adl-java new file mode 100644 index 00000000..b8474fbf --- /dev/null +++ b/adl/tests/test31/lib/common/config/okta.adl-java @@ -0,0 +1,12 @@ +module common.config.okta +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.okta"; + +annotation OktaConfig JavaCustomType { + "javaname" : "au.com.helixta.service.auth.OktaCfg", + "helpers" : "au.com.helixta.adl.custom.OktaConfigHelpers" +}; +}; diff --git a/adl/tests/test31/lib/common/config/server.adl b/adl/tests/test31/lib/common/config/server.adl new file mode 100644 index 00000000..964e07e6 --- /dev/null +++ b/adl/tests/test31/lib/common/config/server.adl @@ -0,0 +1,3 @@ +module common.config.server +{ +}; diff --git a/adl/tests/test31/lib/common/config/sms.adl b/adl/tests/test31/lib/common/config/sms.adl new file mode 100644 index 00000000..95af4094 --- /dev/null +++ b/adl/tests/test31/lib/common/config/sms.adl @@ -0,0 +1,32 @@ +module common.config.sms +{ +import common.config.aws.AwsCredentialsProvider; +import common.config.aws.AwsRegionProvider; +import common.strings.StringNE; + +/// Configuration for the sms provider used +union SmsConfig { + /// Fake sms, just prints outgoing message info to stdout + Void fake; + + /// Twilio implementation + TwilioConfig twilio; + + /// AWS SNS implementation + AwsSnsConfig sns; + +}; + +/// Configuration 
for twilio +struct TwilioConfig { + StringNE accountSid; + StringNE authToken; +}; + +/// Configuration for AWS SMS implementation +struct AwsSnsConfig { + AwsCredentialsProvider credentials; + AwsRegionProvider region; +}; + +}; diff --git a/adl/tests/test31/lib/common/config/sms.adl-java b/adl/tests/test31/lib/common/config/sms.adl-java new file mode 100644 index 00000000..d614f75c --- /dev/null +++ b/adl/tests/test31/lib/common/config/sms.adl-java @@ -0,0 +1,7 @@ +module common.config.sms +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.config.sms"; +}; diff --git a/adl/tests/test31/lib/common/config/storage.adl b/adl/tests/test31/lib/common/config/storage.adl new file mode 100644 index 00000000..5b3b7b6f --- /dev/null +++ b/adl/tests/test31/lib/common/config/storage.adl @@ -0,0 +1,30 @@ +module common.config.storage { + + struct LocalBlobstoreConfig { + /// The directory in which the blobstore files will be stored + String directory; + + /// The host of the server to be used for serving public files, e.g. "http://localhost" + String publicHost; + + /// The path on which blobstore objects are publically accessible e.g. "/files" + String publicPath; + }; + + struct LocalFileStoreConfig { + /// The directory in which the blobstore files will be stored + String directory; + + /// The host of the server to be used for serving public files, e.g. "http://localhost" + String publicHost; + + /// The path on which blobstore objects are publically accessible e.g. 
"/files" + String publicPath; + + /// False if the data in the FileStore is to be immutable + Bool allowOverwrite; + }; + + + +}; diff --git a/adl/tests/test31/lib/common/db.adl b/adl/tests/test31/lib/common/db.adl new file mode 100644 index 00000000..c5af92c7 --- /dev/null +++ b/adl/tests/test31/lib/common/db.adl @@ -0,0 +1,78 @@ +module common.db { + +// Annotation to indicate that an ADL struct is to be +// mapped to a database table +struct DbTable +{ + // If non empty, specifies the table name in the + // database. Otherwise it will be derived from the + // name of the adl struct + String tableName = ""; + + // If true, the generated db table will containing + // a text column `id`, which will be the primary key. + Bool withIdPrimaryKey = false; + + // If provided, use the given fields as the table's primary key + Vector withPrimaryKey = []; + + // Indexes for the table + Vector> indexes = []; + + // Uniqueness constraints over the table + Vector> uniquenessConstraints = []; + + // Additional SQL to be run after all tables have been + // created + Vector extraSql = []; + + // The textual columns used to assemble a label for the row + // (to be shown in UIs alongside the id) + Vector label = []; +}; + +// Annotation to indicate that an ADL struct is to be +// mapped to a database view. +struct DbView { + + // If non empty, specifies the view name in the + // database. Otherwise it will be derived from the + // name of the adl struct + String viewName = ""; + + // If true, the view will containing + // a text column `id`, which will be the primary key. + Bool withIdPrimaryKey = false; + + // If non empty the sql required to create the view. + // If empty, then it's assumed that the view corresponding + // is created by some other means + Vector viewSql = []; +}; + + +/// Field level annotation to override the name of the +/// database column. +type DbColumnName = String; + +/// Field or type level annotation to override the type of the +/// database column. 
+type DbColumnType = String; + +/// Field level annotation to indicate that a column value +/// is generated, and hence need not be requested from or edited +/// by a user. +type DbColumnGenerated = Void; + +/// A reference for a database stored value, referenced by a +/// string primary key. +newtype DbKey = String; + +/// A value of type T along with a unique db identifier +struct WithDbId +{ + DbKey id; + T value; +}; + +}; diff --git a/adl/tests/test31/lib/common/db.adl-java b/adl/tests/test31/lib/common/db.adl-java new file mode 100644 index 00000000..a16294b9 --- /dev/null +++ b/adl/tests/test31/lib/common/db.adl-java @@ -0,0 +1,32 @@ +module common.db +{ +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.db"; + +/// Annotation to control how java custom types are mapped to the +/// database +struct JavaDbCustomType { + /// The type returned from a db query + String javaDbType; + + /// The class in which to look for fromDb and toDb static functions + String helpers; +}; + +/// Annotation to control which format of java db table class is emitted +union JavaDbTableVersion { + Void v1; + Void v2; +}; + + +// Use a custom type for DbKey, primarily so that we can override the +// toString implementation +annotation DbKey JavaCustomType { + "javaname" : "au.com.helixta.adl.custom.DbKey", + "helpers": "au.com.helixta.adl.custom.DbKeyHelpers" +}; + +}; + diff --git a/adl/tests/test31/lib/common/flyway/api.adl b/adl/tests/test31/lib/common/flyway/api.adl new file mode 100644 index 00000000..d1ad8c40 --- /dev/null +++ b/adl/tests/test31/lib/common/flyway/api.adl @@ -0,0 +1,119 @@ +module common.flyway.api { + +import common.flyway.internals.FlywayAction; +import common.flyway.internals.FlywayContext; +import common.http.HttpPost; + +type FlywayApi = HttpPost; + +union FlywayReq { + /// Show pending migrations + Void plan; + /// Apply pending migrations + Void apply; + /// Something customized + AdvancedFlywayReq advanced; +}; + +/// 
Information passed when access via API +struct AdvancedFlywayReq { + FlywayReqContextOpt context = "application"; + FlywayAction action; +}; + +union FlywayReqContextOpt { + /// Derive the context from the app config + /// Throw exception if it app is not configured with a flyway context + Void application; + /// The first ctx in bootstrapable->bootstrap. + /// Throw exception if it doesn't exist + Void bootstrap; + /// Overide the application flyway config + FlywayContext customCtx; +}; + +union FlywayResp { + String error; + String msg; + FlywayCleanResult clean; + FlywayInfo info; + FlywayMigrateResult migrationResult; + FlywayBaselineResult baselineResult; + Vector sequence; +}; + +struct FlywayInfo { + String schemaVersion; + Vector migrations; +}; + +struct FlywayMigrationInfo { + String category; + String version; + String description; + String type_; + String installed_on; + String state; +}; + +struct FlywayMigrateResult { + String message; + // OperationResultBase + String flywayVersion; + String database; + Vector warnings; + String operation; + // MigrateResult + Nullable initialSchemaVersion; + Nullable targetSchemaVersion; + Nullable schemaName; + Vector migrations; + Nullable migrationsExecuted; +}; + +struct FlywayBaselineResult { + // OperationResultBase + String flywayVersion; + String database; + Vector warnings; + String operation; + // BaselineResult + Bool successfullyBaselined; + Nullable baselineVersion; +}; + +struct FlywayCleanResult { + // OperationResultBase + String flywayVersion; + String database; + Vector warnings; + String operation; + // BaselineResult + Vector schemasCleaned; + Vector schemasDropped; +}; + +struct FlywayMigrateOutput { + String category; + String version; + String description; + String type_; + String filepath; + Int32 executionTime; +}; + +// struct FlywaySchemaHistory { +// Int32 installed_rank; // | integer | not null +// Nullable version; // | character varying(50) | +// String description; // | character 
varying(200) | not null +// String type_; // | character varying(20) | not null +// String script; // | character varying(1000) | not null +// Nullable checksum; // | integer | +// String installed_by; // | character varying(100) | not null +// Instant installed_on; // | timestamp without time zone | not null default now() +// Int32 execution_time; // | integer | not null +// Bool success; // | boolean | not null +// }; + + +}; diff --git a/adl/tests/test31/lib/common/flyway/api.adl-java b/adl/tests/test31/lib/common/flyway/api.adl-java new file mode 100644 index 00000000..9c2b227f --- /dev/null +++ b/adl/tests/test31/lib/common/flyway/api.adl-java @@ -0,0 +1,7 @@ +module common.flyway.api { + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.flyway.api"; + +}; diff --git a/adl/tests/test31/lib/common/flyway/example_patterns.adl b/adl/tests/test31/lib/common/flyway/example_patterns.adl new file mode 100644 index 00000000..9a7ba433 --- /dev/null +++ b/adl/tests/test31/lib/common/flyway/example_patterns.adl @@ -0,0 +1,103 @@ +// These are example configs which in a project would likely be constructed in code +// and used when calling FlywayUtils +module common.flyway.example_patterns { + +import common.flyway.internals.FlywayCommand; + + +// Example application config. +union FlywayConfig { + // bootstraps by applying "create migrations" & baseline existing migrations + // startup applies new migrations. + Void mixed_mode_latest; + // bootstraps by applying "create migrations" & baseline existing migrations + // startup does not apply migrations. Migrations can be applied via adminui. + Void mixed_mode_manual; + // bootstraps by applying all migrations from scratch. + // startup applies new migrations. 
+ Void migration_mode_latest; + // customize the flyway app startup + FlywayCommand advanced; +}; + + +type ExampleFlywayCommand = StringMap; + +annotation ExampleFlywayCommand { +// Tim's preferred model for fastdev, which works the same, but runs +// any pending migrations when the server is started + "mixedmode_latest": + { + "ctx": { + "sqlMigrationDir": "/app/sql/migrations", + "javaMigrationPackage": "app/java/migrations" + }, + "bootstrap": [ + { + "withContext": { + "ctx": { + "schemaHistoryTable": "flyway_bootstrap_schema_history", + "sqlMigrationDir": "/app/sql/create", + "javaMigrationPackage": "app/java/createmigrations" + }, + "action": { + "sequence": [ + "baseline_zero", + "migrate" + ] + } + } + }, + "baseline_latest", + "migrate" + ], + "action": "migrate" + } +, +// Tim's preferred model for prod, which bootstraps a new db from create scripts +// and baselines to the last migration. Subsequent migrations are run from the adminui + "mixedmode_manual": + { + "ctx": { + "sqlMigrationDir": "/app/sql/migrations", + "javaMigrationPackage": "app/java/migrations" + }, + "bootstrap": [ + { + "withContext": { + "ctx": { + "schemaHistoryTable": "flyway_bootstrap_schema_history", + "sqlMigrationDir": "/app/sql/create", + "javaMigrationPackage": "app/java/createmigrations" + }, + "action": { + "sequence": [ + "baseline_zero", + "migrate" + ] + } + } + }, + "baseline_latest", + "migrate" + ], + "action": null + } +, +// Paul's preferred model for prod (I think) which bootstraps +// by applying all migrations from scratch + "migration_mode_latest": + { + "ctx": { + "sqlMigrationDir": "/app/sql/migrations", + "javaMigrationPackage": "app/java/migrations" + }, + "bootstrap": [ + "baseline_zero", + "migrate" + ], + "action": "migrate" + } +}; + +}; diff --git a/adl/tests/test31/lib/common/flyway/example_patterns.adl-java b/adl/tests/test31/lib/common/flyway/example_patterns.adl-java new file mode 100644 index 00000000..6fd24eb1 --- /dev/null +++ 
b/adl/tests/test31/lib/common/flyway/example_patterns.adl-java @@ -0,0 +1,7 @@ +module common.flyway.example_patterns { + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.flyway.example_patterns"; + +}; diff --git a/adl/tests/test31/lib/common/flyway/internals.adl b/adl/tests/test31/lib/common/flyway/internals.adl new file mode 100644 index 00000000..64d13279 --- /dev/null +++ b/adl/tests/test31/lib/common/flyway/internals.adl @@ -0,0 +1,112 @@ +module common.flyway.internals { + +struct FlywayCommand { + FlywayContext ctx; + /// The action to take on application startup + Vector bootstrap = []; + Nullable action; +}; + +/// Contextual information required to run flyway +struct FlywayContext { + /// The name of the flyway history table. + /// Useful to override for bootstraping. + String schemaHistoryTable = "flyway_schema_history"; + /// The location from which db migrations are loaded + String sqlMigrationDir = "/app/sql/migrations"; + /// The package from which java based db migrations are loaded + Nullable javaMigrationPackage = "app/java/migrations"; + /// DB connection retries, with 1 second between retries + Int32 connect_retries = 3; +}; + +union FlywayAction { + /// Get migration info, the a combintation of the flyway schema history and future migrations. 
+ Void info; + // Baseline actions + Void baseline_zero; + Void baseline_one; + Void baseline_latest; + String baseline_version; + // Migration actions + /// Migrate to lastest with default options + Void migrate; + MigrateLatestAction migrate_latest; + MigrateAction migrate_to_version; + // other + Vector sequence; + FlywayCtxAction withContext; +}; + +struct FlywayCtxAction { + /// The context in normal (non-bootstrap) mode + FlywayContext ctx; + /// Array of actions to run on bootstrap + FlywayAction action; +}; + +struct MigrateLatestAction { + /// migration options + MigrateOptionsOptions options = "defaults"; +}; + +struct MigrateAction { + /// migration options + MigrateOptionsOptions options = "defaults"; + String version; +}; + +union MigrateOptionsOptions { + Void defaults; + Void ignoreMissingMigrations; + Void ignoreIgnoredMigrations; + Void outOfOrder; + Void dontValidateOnMigrate; + MigrateOptions custom = {}; +}; + +struct MigrateOptions { + /// Ignore missing migrations when reading the schema history table. These are migrations that were performed by an + /// older deployment of the application that are no longer available in this version. + // Not the Flyway default + Bool ignoreMissingMigrations = true; + /// Ignore ignored migrations when reading the schema history table. These are migrations that were added in between + /// already migrated migrations in this version. + // Not the Flyway default + Bool ignoreIgnoredMigrations= true; + /// Ignore pending migrations when reading the schema history table. These are migrations that are available on the + /// classpath but have not yet been performed by an application deployment. + /// This can be useful for verifying that in-development migration changes don't contain any validation-breaking changes + /// of migrations that have already been applied to a production environment, e.g. as part of a CI/CD process, without + /// failing because of the existence of new migration versions. 
+ Bool ignorePendingMigrations = false; + /// Ignore future migrations when reading the schema history table. These are migrations that were performed by a + /// newer deployment of the application that are not yet available in this version. For example: we have migrations + /// available on the classpath up to version 3.0. The schema history table indicates that a migration to version 4.0 + /// (unknown to us) has already been applied. Instead of bombing out (fail fast) with an exception, a + /// warning is logged and Flyway continues normally. This is useful for situations where one must be able to redeploy + /// an older version of the application after the database has been migrated by a newer one. + Bool ignoreFutureMigrations = true; + /// Whether to validate migrations and callbacks whose scripts do not obey the correct naming convention. A failure can be + /// useful to check that errors such as case sensitivity in migration prefixes have been corrected. + Bool validateMigrationNaming = false; + /// Whether to automatically call validate or not when running migrate. (default: {@code true}) + Bool validateOnMigrate = true; + /// Whether to automatically call clean or not when a validation error occurs. (default: {@code false}) + ///

This is exclusively intended as a convenience for development. even though we + /// strongly recommend not to change migration scripts once they have been checked into SCM and run, this provides a + /// way of dealing with this case in a smooth manner. The database will be wiped clean automatically, ensuring that + /// the next migration will bring you back to the state checked into SCM.

+ ///

Warning ! Do not enable in production !

+ Bool cleanOnValidationError = false; + /// Whether to disable clean. (default: {@code false}) + ///

This is especially useful for production environments where running clean can be quite a career limiting move.

+ Bool cleanDisabled = false; + /// Allows migrations to be run "out of order". + ///

If you already have versions 1 and 3 applied, and now a version 2 is found, + /// it will be applied too instead of being ignored.

+ Bool outOfOrder = false; +}; + + +}; diff --git a/adl/tests/test31/lib/common/flyway/internals.adl-java b/adl/tests/test31/lib/common/flyway/internals.adl-java new file mode 100644 index 00000000..c17b1343 --- /dev/null +++ b/adl/tests/test31/lib/common/flyway/internals.adl-java @@ -0,0 +1,7 @@ +module common.flyway.internals { + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.flyway.internals"; + +}; diff --git a/adl/tests/test31/lib/common/http.adl b/adl/tests/test31/lib/common/http.adl new file mode 100644 index 00000000..ce930b76 --- /dev/null +++ b/adl/tests/test31/lib/common/http.adl @@ -0,0 +1,152 @@ +module common.http { + +import sys.types.Set; + +/// A marker type to associate the output response +/// type for an http Get request. +type Get = Void; + +/// A marker type to associate the input request and output response +/// types for an http put request. +type Put = Void; + +/// A marker type to associate the input request and output response +/// types for an http post request. +type Post = Void; + +/// An annotation indicating the URL path for a request +type Path = String; + +/// An annotation indicating that a request requires an Authorization http header +type AuthHeader = Void; + +/// An annotation indicating that a request is idemponent, and can hence be safely +/// retried on failure. +type Idempotent = Void; + +/// An annotation indicating that a request can be made idempotent if the caller +/// provides an Idempotency-Key header with a unique value. 
+type IdempotentWithKey = Void; + +/// The standard message body for errors +struct PublicErrorData { + String publicMessage; +}; + +/// New request types + +struct HttpGet { + String path; + HttpSecurity security; + Nullable rateLimit = null; + TypeToken respType = null; +}; + +struct HttpGetStream { + String path; + HttpSecurity security; + TypeToken respItemType = null; +}; + +struct HttpGet2 { + String path; + HttpSecurity security; + Nullable rateLimit = null; + TypeToken

paramsType = null; + TypeToken respType = null; +}; + +struct HttpPut { + String path; + HttpSecurity security; + Nullable rateLimit = null; + TypeToken reqType = null; + TypeToken respType = null; +}; + +struct HttpPost { + String path; + HttpSecurity security; + Nullable rateLimit = null; + TypeToken reqType = null; + TypeToken respType = null; +}; + +struct HttpPost2 { + String path; + HttpSecurity security; + Nullable rateLimit = null; + TypeToken

paramsType = null; + TypeToken reqType = null; + TypeToken respType = null; +}; + +struct HttpDelete { + String path; + HttpSecurity security; + TypeToken

paramsType = null; + TypeToken respType = null; +}; + +union HttpSecurity { + // The endpoint is publically accessible + Void public; + + // A token is required to access the endpoint + Void token; + + // A token containing the specified role is required + // for access + String tokenWithRole; +}; + + +struct HttpRateLimit { + Word32 maxRequests; + RateLimitTimeUnit perTimeUnit; +}; + +union RateLimitTimeUnit { + Void second; + Void minute; + Void hour; +}; + +/// API decl or request annotation to specify +/// the non-success response codes we want +/// included in the generated open API. The +/// Json should have exactly the structure of +/// the OpenApi responses map +type OpenApiOtherResponses = Json; + + +/// API annotation to specify the available +/// server endpoints. The Json should have +/// exactly the structure of the OpenApi +/// servers map +type OpenApiServers = Json; + +/// Marker annotation to indicate that an endpoint +/// or (defaulted) field should be left out of the +/// generated openapi +type OpenApiExclude = Bool; + +/// API annotation to specify additional api +/// information. 
The Json should have +/// exactly the structure of the OpenApi +/// info map +type OpenApiInfo = Json; + +/// API decl annotation to specify +/// the security schema in use +union SecurityScheme { + Void httpBearer; + HeaderApiKeyScheme apiKey; +}; + +struct HeaderApiKeyScheme { + String headerName; +}; + +}; + diff --git a/adl/tests/test31/lib/common/http.adl-java b/adl/tests/test31/lib/common/http.adl-java new file mode 100644 index 00000000..79db148e --- /dev/null +++ b/adl/tests/test31/lib/common/http.adl-java @@ -0,0 +1,7 @@ +module common.http +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.http"; +}; diff --git a/adl/tests/test31/lib/common/strings.adl b/adl/tests/test31/lib/common/strings.adl new file mode 100644 index 00000000..e080e8ce --- /dev/null +++ b/adl/tests/test31/lib/common/strings.adl @@ -0,0 +1,46 @@ +/// Some string type aliases, with attributes to derive UI validation +module common.strings +{ + +import common.ui.ValidRegex; + +/// A string that isn't empty, and isn't only whitespace. +type StringNE = String; + +annotation StringNE ValidRegex { + "regex" : "^.*\\S+.*$", + "description" : "non empty", + "returnGroup" : 0 +}; + +/// An alphanumeric string, with hyphens for separation. +type StringANH = String; + +/// A multi line, free-form text string +type StringML = String; + +annotation StringANH ValidRegex { + "regex" : "^[A-Za-z][A-Za-z0-9-]*$", + "description" : "alphanumeric", + "returnGroup" : 0 +}; + +/// An email address +type EmailAddress = String; + +// see https://stackoverflow.com/questions/201323/how-to-validate-an-email-address-using-a-regular-expression +// The regex below allows whitespace before and after the email address, but the first match group excludes these. 
+annotation EmailAddress ValidRegex { + "regex" : "^\\s*((?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\\.){3}(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\]))\\s*$", + "description" : "an email address", + "returnGroup" : 1 +}; + +/// A markdown text string +type StringMD = String; + +/// A password, which cannot be empty. Other constraints +/// are application specific. +type Password = String; + +}; diff --git a/adl/tests/test31/lib/common/tabular.adl b/adl/tests/test31/lib/common/tabular.adl new file mode 100644 index 00000000..1f8ca5a3 --- /dev/null +++ b/adl/tests/test31/lib/common/tabular.adl @@ -0,0 +1,83 @@ +/// Common definitions for querying tabular data +module common.tabular +{ + +import common.strings.StringNE; +import common.Instant; +import common.LocalDate; + +type FieldName = StringNE; + +struct ExprLike { Expr expr; String pattern; Bool caseSensitive = true; }; +struct ExprIn { Expr expr; Vector exprs; }; +struct ExprComparison { Expr expr1; Expr expr2; }; + +union FieldPredicate { + ExprComparison equalTo; + ExprIn in; + ExprLike like; + Expr isnull; + FieldPredicate not; + ExprComparison greaterThan; + ExprComparison lessThan; + Vector and; + Vector or; + Bool literal; +}; + +union Expr { + String string; + Int32 int; + Bool bool; + LocalDate date; + Instant instant; + + FieldName field; + Void currentDate; + Vector concat; +}; + +union SortDirection { + Void ascending; + Void descending; +}; + +struct SortField { FieldName field; SortDirection direction; }; + + +// A view of a table +struct TableView +{ + // The columns to show + Vector columns; + + // A filter 
controlling which rows to show + FieldPredicate filter = {"literal":true}; + + // How the results should be sorted + Vector sorting = []; +}; + +// The structure we pass to specify a page of values to be +// loaded. +struct TableQuery { + // By default we match all rows + FieldPredicate filter = {"literal":true}; + + // Sorting order + Vector sorting = []; + + // Pagination offset + Int32 offset = 0; + + // Number of items to return (-1 => all) + Int32 count = -1; +}; + +// A trivial single element struct, to enable non-struct +// values to be shown in tables +struct SingleField { + T value; +}; + +}; diff --git a/adl/tests/test31/lib/common/tabular.adl-java b/adl/tests/test31/lib/common/tabular.adl-java new file mode 100644 index 00000000..ec8c810b --- /dev/null +++ b/adl/tests/test31/lib/common/tabular.adl-java @@ -0,0 +1,8 @@ +module common.tabular +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.tabular"; + +}; diff --git a/adl/tests/test31/lib/common/ui.adl b/adl/tests/test31/lib/common/ui.adl new file mode 100644 index 00000000..82331de5 --- /dev/null +++ b/adl/tests/test31/lib/common/ui.adl @@ -0,0 +1,51 @@ +module common.ui +{ + +import sys.types.Pair; + +// UI Annotations + +// field annotation specifying the label to be shown on forms and +// table headings +type FormLabel = String; + +// field annotation allocating a field to a display group. An empty +// group key means that the field is to be shown ungrouped. +type FormGroupKey = String; + +// struct annotation for groups +struct FormGroups { + // The default group for unannotated fields + FormGroupKey defaultKey; + + // The text labels for each group + Vector> labels; +}; + +/// An field/type alias annotation to constrain the +/// values allowed by a string to the enumerated values +struct ValidValues { + /// The allowed values + Vector values; + + /// A (short) user readable string describing the + /// expected text. 
+ String description; +}; + +/// An field/type alias annotation to constrain the +/// values allowed by a string to a regular expression +struct ValidRegex { + /// The regexp that must be matched + String regex; + + /// A (short) user readable string describing the + /// expected text. + String description; + + /// The regex group index to return if matches + /// 0 is the entire string + Int8 returnGroup = 0; +}; + +}; diff --git a/adl/tests/test31/lib/common/ui.adl-java b/adl/tests/test31/lib/common/ui.adl-java new file mode 100644 index 00000000..0b9567be --- /dev/null +++ b/adl/tests/test31/lib/common/ui.adl-java @@ -0,0 +1,7 @@ +module common.ui +{ + +import adlc.config.java.*; + +annotation JavaPackage "au.com.helixta.adl.common.ui"; +}; diff --git a/adl/tests/test31/proj/adl.pkg.json b/adl/tests/test31/proj/adl.pkg.json new file mode 100644 index 00000000..151f1e96 --- /dev/null +++ b/adl/tests/test31/proj/adl.pkg.json @@ -0,0 +1,4 @@ +{ + "path" : "github.com/adl-lang/adl/adl/tests/test31/proj", + "adlc": "0.0.0" +} \ No newline at end of file diff --git a/adl/tests/test31/proj/protoclient/protoapp/api.adl b/adl/tests/test31/proj/protoclient/protoapp/api.adl new file mode 100644 index 00000000..31d0b441 --- /dev/null +++ b/adl/tests/test31/proj/protoclient/protoapp/api.adl @@ -0,0 +1,140 @@ +module protoclient.protoapp.api { + +import common.Instant; +import common.db.WithDbId; +import common.config.log.LogLevel; +import common.tabular.TableQuery; +import common.Paginated; +import common.Unit; +import common.http.HttpPost; +import common.http.HttpGet; +import common.strings.Password; +import common.strings.StringML; +import common.strings.StringNE; + +import protoclient.protoapp.db.AppUser; +import protoclient.protoapp.db.AppUserId; +import protoclient.protoapp.db.MessageId; + +/// The app API +struct ApiRequests { + /// Login a user + HttpPost>> with_prim = { + "path": "/login", + "security" : "public" + }; + + /// Login a user + HttpPost login = { + 
"path": "/login", + "security" : "public" + }; + + /// Post a message to the noticeboard + HttpPost newMessage = { + "path": "/messages/new", + "security" : "token" + }; + + /// Get recent noticeboard messages + HttpPost > recentMessages = { + "path": "/messages/recent", + "security" : "token" + }; + + /// Gets the logged in user details + // NOTE: Fails with 401 if the token is invalid or user does not exist + HttpGet whoAmI = { + "path": "/whoami", + "security" : "token" + }; + + /// Create a new user + HttpPost createUser = { + "path": "/users/create", + "security" : {"tokenWithRole": "admin"} + }; + + /// Update an existing user + HttpPost, Unit> updateUser = { + "path": "/users/update", + "security" : {"tokenWithRole": "admin"} + }; + + /// Delete an existing user + HttpPost deleteUser = { + "path": "/users/delete", + "security" : {"tokenWithRole": "admin"} + }; + + /// Query existing users sorted and filters according to the + /// TableQuery request. + HttpPost>> queryUsers = { + "path": "/users/query", + "security" : {"tokenWithRole": "admin"} + }; + + /// Logs an error from the client app without user information, i.e. 
when the + /// user has not logged in + HttpPost logClientErrorPublic = { + "path": "/client/log/public", + "security": "public", + "rateLimit": { "maxRequests": 6, "perTimeUnit": "minute" } + }; + + /// Logs an error from the client app and includes user information + HttpPost logClientErrorUser = { + "path": "/client/log/user", + "security": "token" + }; +}; + +struct UserProfile { + AppUserId id; + String fullname; + String email; + Bool isAdmin; +}; + +/// Details for a user +struct UserReq { + String fullname; + String email; + Password password; +}; + +struct LoginReq { + StringNE email; + Password password; +}; + +union LoginResp { + StringNE accessToken; + Void invalidCredentials; +}; + +struct NewMessageReq { + StringML message; +}; + +struct RecentMessagesReq { + // Pagination offset + Int32 offset = 0; + + // Number of items to return + Int32 count = 20; +}; + +struct Message { + MessageId id; + Instant postedAt; + String userFullName; + StringML message; +}; + +struct ClientLogReq { + LogLevel level; + String error; + Nullable stacktrace; +}; +}; diff --git a/adl/tests/test31/proj/protoclient/protoapp/config.adl b/adl/tests/test31/proj/protoclient/protoapp/config.adl new file mode 100644 index 00000000..481975ff --- /dev/null +++ b/adl/tests/test31/proj/protoclient/protoapp/config.adl @@ -0,0 +1,51 @@ +module protoclient.protoapp.config { + +import common.config.frontend.ThreadPoolConfig; +import common.config.log.LogConfig; +import common.config.log.FluentdConfig; +import common.config.db.PostgreSqlConfig; + +/// Configuration for the server +struct ServerConfig { + /// Port to listen for requests + Int16 port; + + /// Environment name, e.g. dev, uat, prod + String environment; + + /// Name of the release / code version + String releaseName; + + /// The shared secret that will be present on requests + /// made by the cron webhook system. 
+ String cronSharedSecret; + + /// Secret used to sign JWT tokens + String jwtSecret; + + /// Database config + PostgreSqlConfig db; + + /// Server-side logging config + LogConfig logging; + + /// Client-side logging config + /// If not provided all client logs will appear in the server logs + Nullable clientLogging = null; + + /// Frontend threadpool configuration + ThreadPoolConfig threadPool; + + // If set, apply automatic migrations on startup + Nullable flyway = null; +}; + +type ClientLogConfig = FluentdConfig; + +union LocalFlywayConfig { + Void fastdev; + Void deployed; + common.flyway.internals.FlywayCommand advanced; +}; + +}; diff --git a/adl/tests/test31/proj/protoclient/protoapp/db.adl b/adl/tests/test31/proj/protoclient/protoapp/db.adl new file mode 100644 index 00000000..2445717f --- /dev/null +++ b/adl/tests/test31/proj/protoclient/protoapp/db.adl @@ -0,0 +1,57 @@ +module protoclient.protoapp.db { + +import common.strings.StringML; +import common.strings.StringNE; +import common.db.DbTable; +import common.db.DbKey; +import common.Instant; + + +struct Trade { + Vector commodities; + Instant date; +}; + +annotation Trade DbTable { + "withIdPrimaryKey" : true +}; + + +struct Commodity { + String code; +}; + +/// Details for a user +struct AppUser { + StringNE fullname; + StringNE email; + Bool isAdmin; + StringNE hashedPassword = ""; +}; + +// Annotation to specify that there is a db table for user, +// with an id primary key, and a index over username. 
+annotation AppUser DbTable { + "withIdPrimaryKey" : true, + "indexes" : [["email"]], + "label" : ["fullname"] +}; + +type AppUserId = DbKey; + +/// messages posted on the noticeboard +struct Message { + Instant postedAt; + DbKey postedBy; + StringML message; +}; + +annotation Message DbTable { + "withIdPrimaryKey" : true, + "indexes" : [["posted_at"]] +}; + +type MessageId = DbKey; + + +}; diff --git a/adl/tests/test31/proj/protoclient/protoapp/uiconfig.adl b/adl/tests/test31/proj/protoclient/protoapp/uiconfig.adl new file mode 100644 index 00000000..aa62bfb2 --- /dev/null +++ b/adl/tests/test31/proj/protoclient/protoapp/uiconfig.adl @@ -0,0 +1,17 @@ +module protoclient.protoapp.uiconfig { + +import common.config.log.ClientRollbarConfig; + +/// Configuration for the web frontend. +struct UiConfig { + /// Application title + String title = ""; + + /// Environment name, e.g. dev, uat, prod + String environment; + + /// Name of the release / code version + String releaseName; +}; + +}; diff --git a/rust/compiler/src/adlgen/adlc/packaging.rs b/rust/compiler/src/adlgen/adlc/packaging.rs index 70e56e82..d658ef7c 100644 --- a/rust/compiler/src/adlgen/adlc/packaging.rs +++ b/rust/compiler/src/adlgen/adlc/packaging.rs @@ -1,51 +1,67 @@ // @generated from adl module adlc.packaging +use crate::adlrt::custom::sys::types::pair::Pair; use serde::Deserialize; use serde::Serialize; +pub type AdlWorkspace0 = AdlWorkspace; + +pub type AdlWorkspace1 = AdlWorkspace>; + /** * Expected to live in a file named `adl.work.json` */ #[derive(Clone,Debug,Deserialize,Eq,Hash,PartialEq,Serialize)] -pub struct AdlWorkspace { +pub struct AdlWorkspace { /** * Version */ pub adlc: String, + #[serde(default="AdlWorkspace::::def_default_gen_options")] + #[serde(rename="defaultGenOptions")] + pub default_gen_options: Vec, + #[serde(rename="use")] - pub r#use: AdlPackageRefs, + pub r#use: Vec, } -impl AdlWorkspace { - pub fn new(adlc: String, r#use: AdlPackageRefs) -> AdlWorkspace { +impl AdlWorkspace 
{ + pub fn new(adlc: String, r#use: Vec) -> AdlWorkspace { AdlWorkspace { adlc: adlc, + default_gen_options: AdlWorkspace::::def_default_gen_options(), r#use: r#use, } } + + pub fn def_default_gen_options() -> Vec { + vec![] + } } pub type AdlPackageRefs = Vec; #[derive(Clone,Debug,Deserialize,Eq,Hash,PartialEq,Serialize)] pub struct AdlPackageRef { - /** - * must be a path to a directory directly under the folder containing the `adl.work.json` file. - */ - pub root: String, + pub path: String, + #[serde(default="AdlPackageRef::def_gen_options")] #[serde(rename="genOptions")] pub gen_options: Vec, } impl AdlPackageRef { - pub fn new(root: String, gen_options: Vec) -> AdlPackageRef { + pub fn new(path: String) -> AdlPackageRef { AdlPackageRef { - root: root, - gen_options: gen_options, + path: path, + gen_options: AdlPackageRef::def_gen_options(), } } + + pub fn def_gen_options() -> Vec { + vec![] + } } #[derive(Clone,Debug,Deserialize,Eq,Hash,PartialEq,Serialize)] @@ -166,7 +182,11 @@ pub enum ReferenceableScopeOption { */ #[derive(Clone,Debug,Deserialize,Eq,Hash,PartialEq,Serialize)] pub struct AdlPackage { - pub pkg: PackageDirective, + pub path: String, + + #[serde(default="AdlPackage::def_global_alias")] + #[serde(rename="globalAlias")] + pub global_alias: Option, /** * Version @@ -187,9 +207,10 @@ pub struct AdlPackage { } impl AdlPackage { - pub fn new(pkg: PackageDirective, adlc: String) -> AdlPackage { + pub fn new(path: String, adlc: String) -> AdlPackage { AdlPackage { - pkg: pkg, + path: path, + global_alias: AdlPackage::def_global_alias(), adlc: adlc, requires: AdlPackage::def_requires(), excludes: AdlPackage::def_excludes(), @@ -198,6 +219,10 @@ impl AdlPackage { } } + pub fn def_global_alias() -> Option { + None + } + pub fn def_requires() -> Vec { vec![] } diff --git a/rust/compiler/src/cli/mod.rs b/rust/compiler/src/cli/mod.rs index 6d9c94a0..b8185470 100644 --- a/rust/compiler/src/cli/mod.rs +++ b/rust/compiler/src/cli/mod.rs @@ -3,6 +3,8 @@ use 
std::path::{Path, PathBuf};
 use anyhow::{Error, anyhow};
 use clap::{Args, Parser};
 use std::str::{FromStr};
+
+use crate::processing::loader::loader_from_search_paths;
 // use std::path{Path, PathBuf};

 pub mod ast;
@@ -19,7 +21,10 @@ pub fn run_cli() -> i32 {
         Command::Verify(opts) => verify::verify(&opts),
         Command::Ast(opts) => ast::ast(&opts),
         Command::Rust(opts) => rust::rust(&opts),
-        Command::Tsgen(opts) => tsgen::tsgen(&opts),
+        Command::Tsgen(opts) => {
+            let loader = loader_from_search_paths(&opts.search.path);
+            tsgen::tsgen(loader, &opts)
+        },
         Command::WriteStdlib(opts) => crate::adlstdlib::dump(&opts),
     };
     match r {
diff --git a/rust/compiler/src/cli/tsgen/mod.rs b/rust/compiler/src/cli/tsgen/mod.rs
index f400b553..9e6e90b3 100644
--- a/rust/compiler/src/cli/tsgen/mod.rs
+++ b/rust/compiler/src/cli/tsgen/mod.rs
@@ -15,7 +15,7 @@ use genco::prelude::*;
 use crate::adlgen::sys::adlast2::{self as adlast};
 use crate::adlgen::sys::adlast2::{Module, Module1, TypeExpr, TypeRef};
-use crate::processing::loader::loader_from_search_paths;
+use crate::processing::loader::{loader_from_search_paths, AdlLoader};
 use crate::processing::resolver::Resolver;
 use crate::processing::writer::TreeWriter;

@@ -48,8 +48,7 @@ const TSC_B64: &[u8] =
     b"import {fromByteArray as b64Encode, toByteArray as b64Decode} from 'base64-js'";
 const DENO_B64: &[u8] =
     b"import {encode as b64Encode, decode as b64Decode} from 'https://deno.land/std@0.97.0/encoding/base64.ts'";

-pub fn tsgen(opts: &TsOpts) -> anyhow::Result<()> {
-    let loader = loader_from_search_paths(&opts.search.path);
+pub fn tsgen(loader: Box<dyn AdlLoader>, opts: &TsOpts) -> anyhow::Result<()> {
     let mut resolver = Resolver::new(loader);
     for m in &opts.modules {
         let r = resolver.add_module(m);
diff --git a/rust/compiler/src/cli/workspace.rs b/rust/compiler/src/cli/workspace.rs
index c5e4014e..cf59b5dd 100644
--- a/rust/compiler/src/cli/workspace.rs
+++ b/rust/compiler/src/cli/workspace.rs
@@ -1,14 +1,99 @@
-use std::fs;
-use std::path::{Path, 
PathBuf}; -use std::env; +use std::path::PathBuf; +use std::{env, fs}; + +use anyhow::anyhow; + +use serde::Deserialize; + +use crate::adlgen::adlc::packaging::{AdlPackage, AdlWorkspace0, AdlWorkspace1, GenOptions}; +use crate::adlrt::custom::sys::types::pair::Pair; +use crate::processing::loader::loader_from_workspace; + +use super::{tsgen, TsStyle}; +use super::TsOpts; pub(crate) fn workspace(opts: &super::GenOpts) -> Result<(), anyhow::Error> { - let pkg_defs = collect_work_and_pkg(&opts.dir); - println!("{:?}", pkg_defs); + let pkg_defs = collect_work_and_pkg(&opts.dir)?; + let wrk1 = collection_to_workspace(pkg_defs)?; + // println!("{:?}", &wrk1); + for pkg in &wrk1.1.r#use { + for gen in &pkg.0 .0.gen_options { + match gen { + GenOptions::Tsgen(opts) => { + let tsopts = TsOpts { + search: super::AdlSearchOpts { path: vec![] }, + output: super::OutputOpts { + outputdir: PathBuf::from(opts.outputs.output_dir.clone()), + manifest: opts.outputs.manifest.clone().map(|m| PathBuf::from(m)), + }, + include_rt: opts.include_runtime, + runtime_dir: opts.runtime_dir.clone(), + generate_transitive: opts.generate_transitive, + include_resolver: opts.include_resolver, + ts_style: match opts.ts_style { + crate::adlgen::adlc::packaging::TsStyle::Tsc => Some(TsStyle::Tsc), + crate::adlgen::adlc::packaging::TsStyle::Deno => Some(TsStyle::Deno), + }, + modules: match &opts.modules { + crate::adlgen::adlc::packaging::ModuleSrc::All => { + let mut ms = vec![]; + ms + }, + crate::adlgen::adlc::packaging::ModuleSrc::Modules(ms) => ms.clone(), + }, + capitalize_branch_names_in_types: opts.capitalize_branch_names_in_types, + capitalize_type_names: opts.capitalize_type_names, + }; + let loader = loader_from_workspace(wrk1.0.clone(), wrk1.1.clone()); + println!("TsGen for pkg {:?} in workspace {:?} output dir {}", pkg.0.0, wrk1.0, opts.outputs.output_dir); + tsgen::tsgen(loader, &tsopts)?; + } + } + } + } Ok(()) } -const ADL_PKG_FILES: &[(&str, PkgDef)] = &[("adl.pkg.json", 
PkgDef::Pkg), ("adl.work.json", PkgDef::Work)];
+fn collection_to_workspace(
+    pkg_defs: Vec<(PkgDef, PathBuf, &str)>,
+) -> Result<(PathBuf, AdlWorkspace1), anyhow::Error> {
+    for porw in pkg_defs {
+        let porw_path = porw.1.join(porw.2);
+        let content = fs::read_to_string(&porw_path)?;
+        let mut de = serde_json::Deserializer::from_str(&content);
+        match porw.0 {
+            PkgDef::Pkg => {
+                // let pkg = AdlPackage::deserialize(&mut de).map_err(|e| anyhow!("{:?}: {}", porw_path, e.to_string()))?;
+                // println!("pkg {:?}", pkg);
+            }
+            PkgDef::Work => {
+                let wrk0 = AdlWorkspace0::deserialize(&mut de)
+                    .map_err(|e| anyhow!("{:?}: {}", porw_path, e.to_string()))?;
+                let mut wrk1 = AdlWorkspace1 {
+                    adlc: wrk0.adlc.clone(),
+                    default_gen_options: wrk0.default_gen_options.clone(),
+                    r#use: vec![],
+                };
+                for p in wrk0.r#use.iter() {
+                    let p_path = porw.1.join(&p.path).join("adl.pkg.json");
+                    let content = fs::read_to_string(&p_path)?;
+                    let mut de = serde_json::Deserializer::from_str(&content);
+                    let pkg = AdlPackage::deserialize(&mut de)
+                        .map_err(|e| anyhow!("{:?}: {}", p_path, e.to_string()))?;
+                    wrk1.r#use.push(Pair::new(p.clone(), pkg));
+                }
+                // println!("wrk {:?}", wrk1);
+                return Ok((porw.1, wrk1));
+            }
+        }
+    }
+    Err(anyhow!("No workspace found"))
+}
+
+const ADL_PKG_FILES: &[(&str, PkgDef)] = &[
+    ("adl.pkg.json", PkgDef::Pkg),
+    ("adl.work.json", PkgDef::Work),
+];

 #[derive(Debug, Copy, Clone, PartialEq)]
 enum PkgDef {
@@ -16,20 +101,22 @@ enum PkgDef {
     Work,
 }

-fn collect_work_and_pkg(start_dir: &String) -> Vec<(PkgDef,PathBuf)> {
+fn collect_work_and_pkg(start_dir: &String) -> Result<Vec<(PkgDef, PathBuf, &'static str)>, anyhow::Error> {
     let mut res = vec![];
-    let mut current_dir = PathBuf::from(start_dir);
-
+    let current_dir = env::current_dir()?;
+    let current_dir = current_dir.join(start_dir);
+    let mut current_dir = current_dir.canonicalize()?;
+
     loop {
         for f in ADL_PKG_FILES {
             let file_path = current_dir.join(f.0);
             if file_path.exists() {
-                res.push((f.1, current_dir.clone()));
+                
res.push((f.1, current_dir.clone(), f.0));
             }
         }
         if !current_dir.pop() {
             break;
         }
     }
-    res
-}
\ No newline at end of file
+    Ok(res)
+}
diff --git a/rust/compiler/src/processing/loader.rs b/rust/compiler/src/processing/loader.rs
index 9f3bba4c..b15b6b88 100644
--- a/rust/compiler/src/processing/loader.rs
+++ b/rust/compiler/src/processing/loader.rs
@@ -1,16 +1,22 @@
 use anyhow::anyhow;
 use nom::Finish;
 use nom_locate::LocatedSpan;
+use std::collections::HashMap;
 use std::fs;
 use std::io::ErrorKind;
 use std::path::PathBuf;

+use crate::adlgen::adlc::packaging::AdlWorkspace1;
 use crate::adlgen::sys::adlast2 as adlast;
 use crate::parser::{convert_error, raw_module};
 use crate::processing::annotations::apply_explicit_annotations_and_serialized_name;

 use super::Module0;

+pub fn loader_from_workspace(root: PathBuf, workspace: AdlWorkspace1) -> Box<dyn AdlLoader> {
+    Box::new(WorkspaceLoader{ root, workspace, loaders: HashMap::new() })
+}
+
 pub fn loader_from_search_paths(paths: &Vec<PathBuf>) -> Box<dyn AdlLoader> {
     let loaders = paths.iter().map(loader_from_dir_tree).collect();
     Box::new(MultiLoader::new(loaders))
@@ -25,6 +31,41 @@
     fn load(&mut self, module_name: &adlast::ModuleName) -> Result<Option<Module0>, anyhow::Error>;
 }

+pub struct WorkspaceLoader {
+    root: PathBuf,
+    workspace: AdlWorkspace1,
+    // embedded: EmbeddedStdlibLoader,
+    loaders: HashMap<String, Box<dyn AdlLoader>>,
+}
+
+
+impl AdlLoader for WorkspaceLoader {
+    fn load(&mut self, module_name: &adlast::ModuleName) -> Result<Option<Module0>, anyhow::Error> {
+        for pkg in &self.workspace.r#use {
+            let pkg_path = pkg.0.1.path.as_str();
+            let pkg_name = if module_name.starts_with(pkg_path) {
+                Some(pkg.0.1.path.clone())
+            } else if let Some(alias) = pkg.0.1.global_alias.clone() {
+                if module_name.starts_with(alias.as_str()) {
+                    Some(alias)
+                } else {
+                    None
+                }
+            } else {
+                None
+            };
+            if let Some(name) = pkg_name {
+                let loader = self
+                    .loaders
+                    .entry(name)
+                    .or_insert(Box::new(DirTreeLoader::new(self.root.join(&pkg.0.0.path))));
+                return loader.load(module_name);
+            }
+        }
+        
Ok(None) + } +} + /// Combines a bunch of loaders pub struct MultiLoader { embedded: EmbeddedStdlibLoader,