From 96585653881d5965cbe1d62d8592500df5b9e8b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Ferreira?= Date: Wed, 17 Jul 2019 11:25:11 +0100 Subject: [PATCH 01/26] adapt build.sbt and fix compilation errors --- build.sbt | 36 +++++++++---------- project/build.properties | 2 +- .../scala/sangria/introspection/package.scala | 2 +- .../scala/sangria/parser/SourceMapper.scala | 4 +-- .../schema/AstSchemaMaterializer.scala | 4 +-- .../ResolverBasedAstSchemaBuilder.scala | 9 +++-- src/main/scala/sangria/schema/Schema.scala | 5 +-- .../sangria/schema/SchemaValidationRule.scala | 2 +- src/main/scala/sangria/schema/package.scala | 14 ++++---- .../scala/sangria/util/TrieMapCache.scala | 2 +- .../sangria/validation/ValidatorStack.scala | 2 +- .../rules/OverlappingFieldsCanBeMerged.scala | 4 +-- 12 files changed, 42 insertions(+), 44 deletions(-) diff --git a/build.sbt b/build.sbt index 75e62ce5..ae4d6700 100644 --- a/build.sbt +++ b/build.sbt @@ -6,8 +6,8 @@ description := "Scala GraphQL implementation" homepage := Some(url("http://sangria-graphql.org")) licenses := Seq("Apache License, ASL Version 2.0" → url("http://www.apache.org/licenses/LICENSE-2.0")) -scalaVersion := "2.12.7" -crossScalaVersions := Seq("2.11.11", "2.12.7") +scalaVersion := "2.13.0" +crossScalaVersions := Seq("2.11.11", "2.12.7", scalaVersion.value) scalacOptions ++= Seq( "-deprecation", @@ -15,42 +15,42 @@ scalacOptions ++= Seq( "-Xlint:-missing-interpolator,-unused,_") scalacOptions ++= { - if (scalaVersion.value startsWith "2.12") - Seq.empty - else + if (scalaVersion.value startsWith "2.11") Seq("-target:jvm-1.7") + else + Seq.empty } testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, "-oF") libraryDependencies ++= Seq( // AST Parser - "org.parboiled" %% "parboiled" % "2.1.4", + "org.parboiled" %% "parboiled" % "2.1.7", // AST Visitor - "org.sangria-graphql" %% "macro-visit" % "0.1.1", + "org.sangria-graphql" %% "macro-visit" % "0.1.2-SNAPSHOT", // Marshalling - 
"org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.3", + "org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.4-SNAPSHOT", // Streaming - "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.0", + "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.1-SNAPSHOT", // Macros "org.scala-lang" % "scala-reflect" % scalaVersion.value, // Testing - "org.scalatest" %% "scalatest" % "3.0.5" % "test", - "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.1" % Test, - "org.sangria-graphql" %% "sangria-spray-json" % "1.0.1" % Test, - "org.sangria-graphql" %% "sangria-argonaut" % "1.0.0" % Test, - "org.sangria-graphql" %% "sangria-ion" % "1.0.0" % Test, - "org.sangria-graphql" %% "sangria-monix" % "1.0.0" % Test, - "org.sangria-graphql" %% "sangria-rxscala" % "1.0.0" % Test, - "eu.timepit" %% "refined" % "0.9.2" % Test, + "org.scalatest" %% "scalatest" % "3.0.8" % "test", +// "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.1" % Test, +// "org.sangria-graphql" %% "sangria-spray-json" % "1.0.1" % Test, +// "org.sangria-graphql" %% "sangria-argonaut" % "1.0.0" % Test, +// "org.sangria-graphql" %% "sangria-ion" % "1.0.0" % Test, +// "org.sangria-graphql" %% "sangria-monix" % "1.0.0" % Test, +// "org.sangria-graphql" %% "sangria-rxscala" % "1.0.0" % Test, + "eu.timepit" %% "refined" % "0.9.8" % Test, // CATs - "net.jcazevedo" %% "moultingyaml" % "0.4.0" % Test, + "net.jcazevedo" %% "moultingyaml" % "0.4.1" % Test, "io.github.classgraph" % "classgraph" % "4.0.6" % Test ) diff --git a/project/build.properties b/project/build.properties index 0cd8b079..c0bab049 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.3 +sbt.version=1.2.8 diff --git a/src/main/scala/sangria/introspection/package.scala b/src/main/scala/sangria/introspection/package.scala index b14461a0..32e710a5 100644 --- a/src/main/scala/sangria/introspection/package.scala +++ b/src/main/scala/sangria/introspection/package.scala @@ -305,7 
+305,7 @@ package object introspection { __Schema :: __TypeKind :: __DirectiveLocation :: __Type :: __Field :: __InputValue :: __EnumValue :: __Directive :: Nil val IntrospectionTypesByName: Map[String, Type with Named] = - IntrospectionTypes.groupBy(_.name).mapValues(_.head) + IntrospectionTypes.groupBy(_.name).mapValues(_.head).toMap def introspectionQuery: ast.Document = introspectionQuery() diff --git a/src/main/scala/sangria/parser/SourceMapper.scala b/src/main/scala/sangria/parser/SourceMapper.scala index 61439a29..79f11b0c 100644 --- a/src/main/scala/sangria/parser/SourceMapper.scala +++ b/src/main/scala/sangria/parser/SourceMapper.scala @@ -3,8 +3,6 @@ package sangria.parser import org.parboiled2.ParserInput import sangria.ast.AstLocation -import scala.collection.breakOut - trait SourceMapper { def id: String def source: String @@ -23,7 +21,7 @@ class DefaultSourceMapper(val id: String, val parserInput: ParserInput) extends } class AggregateSourceMapper(val id: String, val delegates: Vector[SourceMapper]) extends SourceMapper { - lazy val delegateById: Map[String, SourceMapper] = delegates.map(d ⇒ d.id → d)(breakOut) + lazy val delegateById: Map[String, SourceMapper] = delegates.iterator.map(d ⇒ d.id → d).toMap lazy val source = delegates.map(_.source.trim) mkString "\n\n" diff --git a/src/main/scala/sangria/schema/AstSchemaMaterializer.scala b/src/main/scala/sangria/schema/AstSchemaMaterializer.scala index 5f6efd6d..3dcbaea5 100644 --- a/src/main/scala/sangria/schema/AstSchemaMaterializer.scala +++ b/src/main/scala/sangria/schema/AstSchemaMaterializer.scala @@ -86,7 +86,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A schema else { existingSchema = Some(schema) - existingDefsMat = schema.allTypes.mapValues(MaterializedType(existingOrigin, _)) + existingDefsMat = schema.allTypes.mapValues(MaterializedType(existingOrigin, _)).toMap val queryType = getTypeFromDef(existingOrigin, schema.query) @@ -703,4 +703,4 @@ object 
AstSchemaMaterializer { def extendSchema[Ctx, Val](schema: Schema[Ctx, Val], document: ast.Document, builder: AstSchemaBuilder[Ctx] = AstSchemaBuilder.default): Schema[Ctx, Val] = new AstSchemaMaterializer[Ctx](document, builder).extend(schema) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala b/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala index f61a8cb8..51e9642c 100644 --- a/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala +++ b/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala @@ -2,7 +2,6 @@ package sangria.schema import language.{existentials, postfixOps} import sangria.ast -import sangria.ast._ import sangria.execution.MaterializedSchemaValidationError import sangria.marshalling.{InputUnmarshaller, ResultMarshallerForType, ToInput} import sangria.renderer.SchemaRenderer @@ -401,7 +400,7 @@ object ResolverBasedAstSchemaBuilder { case i: BigInt if !i.isValidLong ⇒ invalidType("Long", value) case i: BigInt ⇒ i.longValue case d: Double if d.isWhole ⇒ d.toLong - case d: BigDecimal if d.isValidLong ⇒ d.longValue() + case d: BigDecimal if d.isValidLong ⇒ d.longValue case v: String ⇒ safe(v.toLong, "Long", value) case _ ⇒ invalidType("Long", value) } @@ -430,10 +429,10 @@ object ResolverBasedAstSchemaBuilder { case i: Int ⇒ i.toDouble case i: Long ⇒ i.toDouble case i: BigInt if !i.isValidDouble ⇒ invalidType("Float", value) - case i: BigInt ⇒ i.doubleValue() + case i: BigInt ⇒ i.doubleValue case d: Double ⇒ d case d: BigDecimal if !d.isDecimalDouble ⇒ invalidType("Float", value) - case d: BigDecimal ⇒ d.doubleValue() + case d: BigDecimal ⇒ d.doubleValue case v: String ⇒ safe(v.toDouble, "Float", value) case _ ⇒ invalidType("Float", value) } @@ -497,7 +496,7 @@ object ResolverBasedAstSchemaBuilder { val resolversByName = resolvers.groupBy(_.directiveName) val stack = ValidatorStack.empty[ast.AstNode] - AstVisitor.visit(schema, AstVisitor( + 
ast.AstVisitor.visit(schema, ast.AstVisitor( onEnter = { case node: ast.WithDirectives ⇒ stack.push(node) diff --git a/src/main/scala/sangria/schema/Schema.scala b/src/main/scala/sangria/schema/Schema.scala index 727164d4..806b28b8 100644 --- a/src/main/scala/sangria/schema/Schema.scala +++ b/src/main/scala/sangria/schema/Schema.scala @@ -884,7 +884,7 @@ case class Schema[Ctx, Val]( lazy val outputTypes = types collect {case (name, (_, tpe: OutputType[_])) ⇒ name → tpe} lazy val scalarTypes = types collect {case (name, (_, tpe: ScalarType[_])) ⇒ name → tpe} lazy val unionTypes: Map[String, UnionType[_]] = - types.filter(_._2._2.isInstanceOf[UnionType[_]]).mapValues(_._2.asInstanceOf[UnionType[_]]) + types.filter(_._2._2.isInstanceOf[UnionType[_]]).mapValues(_._2.asInstanceOf[UnionType[_]]).toMap lazy val directivesByName = directives groupBy (_.name) mapValues (_.head) @@ -912,6 +912,7 @@ case class Schema[Ctx, Val]( .flatMap(objectLike ⇒ objectLike.interfaces map (_.name → objectLike)) .groupBy(_._1) .mapValues(_ map (_._2)) + .toMap } lazy val implementations: Map[String, Vector[ObjectType[_, _]]] = { @@ -1000,4 +1001,4 @@ object Schema { def buildDefinitions[Ctx](document: ast.Document, builder: AstSchemaBuilder[Ctx]) = AstSchemaMaterializer.definitions[Ctx](document, builder) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/schema/SchemaValidationRule.scala b/src/main/scala/sangria/schema/SchemaValidationRule.scala index 07a3b9a0..4151f7cf 100644 --- a/src/main/scala/sangria/schema/SchemaValidationRule.scala +++ b/src/main/scala/sangria/schema/SchemaValidationRule.scala @@ -1,6 +1,6 @@ package sangria.schema -import sangria.ast._ +import sangria.ast.{AstLocation, Document, ObjectTypeDefinition, ObjectTypeExtensionDefinition, UnionTypeDefinition, UnionTypeExtensionDefinition} import language.higherKinds import sangria.execution._ diff --git a/src/main/scala/sangria/schema/package.scala b/src/main/scala/sangria/schema/package.scala index 
bea6b42d..6580ab5e 100644 --- a/src/main/scala/sangria/schema/package.scala +++ b/src/main/scala/sangria/schema/package.scala @@ -38,7 +38,7 @@ package object schema { case i: BigInt if !i.isValidLong ⇒ Left(BigLongCoercionViolation) case i: BigInt ⇒ Right(i.longValue) case d: Double if d.isWhole ⇒ Right(d.toLong) - case d: BigDecimal if d.isValidLong ⇒ Right(d.longValue()) + case d: BigDecimal if d.isValidLong ⇒ Right(d.longValue) case _ ⇒ Left(LongCoercionViolation) }, coerceInput = { @@ -82,10 +82,10 @@ package object schema { case i: Int ⇒ Right(i.toDouble) case i: Long ⇒ Right(i.toDouble) case i: BigInt if !i.isValidDouble ⇒ Left(BigDecimalCoercionViolation) - case i: BigInt ⇒ Right(i.doubleValue()) + case i: BigInt ⇒ Right(i.doubleValue) case d: Double ⇒ Right(d) case d: BigDecimal if !d.isDecimalDouble ⇒ Left(BigDecimalCoercionViolation) - case d: BigDecimal ⇒ Right(d.doubleValue()) + case d: BigDecimal ⇒ Right(d.doubleValue) case _ ⇒ Left(FloatCoercionViolation) }, coerceInput = { @@ -176,13 +176,13 @@ package object schema { BuiltinGraphQLScalars ++ BuiltinSangriaScalars val BuiltinScalarsByName: Map[String, ScalarType[_]] = - BuiltinScalars.groupBy(_.name).mapValues(_.head) + BuiltinScalars.groupBy(_.name).mapValues(_.head).toMap val BuiltinGraphQLScalarsByName: Map[String, ScalarType[_]] = - BuiltinGraphQLScalars.groupBy(_.name).mapValues(_.head) + BuiltinGraphQLScalars.groupBy(_.name).mapValues(_.head).toMap val BuiltinSangriaScalarsByName: Map[String, ScalarType[_]] = - BuiltinSangriaScalars.groupBy(_.name).mapValues(_.head) + BuiltinSangriaScalars.groupBy(_.name).mapValues(_.head).toMap val IfArg = Argument("if", BooleanType, "Included when true.") @@ -222,7 +222,7 @@ package object schema { val BuiltinDirectives = IncludeDirective :: SkipDirective :: DeprecatedDirective :: Nil val BuiltinDirectivesByName: Map[String, Directive] = - BuiltinDirectives.groupBy(_.name).mapValues(_.head) + BuiltinDirectives.groupBy(_.name).mapValues(_.head).toMap def 
fields[Ctx, Val](fields: Field[Ctx, Val]*): List[Field[Ctx, Val]] = fields.toList diff --git a/src/main/scala/sangria/util/TrieMapCache.scala b/src/main/scala/sangria/util/TrieMapCache.scala index f2eae739..92b4d23e 100644 --- a/src/main/scala/sangria/util/TrieMapCache.scala +++ b/src/main/scala/sangria/util/TrieMapCache.scala @@ -18,7 +18,7 @@ class TrieMapCache[Key, Value] extends Cache[Key, Value] { def getOrElseUpdate(key: Key, fn: ⇒ Value) = cache.getOrElseUpdate(key, fn) def find(fn: (Key, Value) ⇒ Boolean) = cache.find {case (key, value) ⇒ fn(key, value)} def mapToSet[R](fn: (Key, Value) ⇒ R) = cache.map {case (key, value) ⇒ fn(key, value)}.toSet - def mapValues[R](fn: Value ⇒ R) = cache.mapValues(fn) + def mapValues[R](fn: Value ⇒ R) = cache.mapValues(fn).toMap def keyExists(fn: Key ⇒ Boolean) = cache.keySet.exists(fn) def forEachValue(fn: Value ⇒ Unit) = cache.values.foreach(fn) def removeKeys(fn: Key ⇒ Boolean) = cache.keys.toVector.foreach(key ⇒ if (fn(key)) cache.remove(key)) diff --git a/src/main/scala/sangria/validation/ValidatorStack.scala b/src/main/scala/sangria/validation/ValidatorStack.scala index 8e97b5b4..dc101d1a 100644 --- a/src/main/scala/sangria/validation/ValidatorStack.scala +++ b/src/main/scala/sangria/validation/ValidatorStack.scala @@ -12,7 +12,7 @@ class ValidatorStack[T] { def head(toDrop: Int) = stack.drop(toDrop).head def headOption(toDrop: Int) = stack.drop(toDrop).headOption def nonEmpty = stack.nonEmpty - def toSeq: Seq[T] = stack + def toSeq: Seq[T] = stack.toSeq } object ValidatorStack { diff --git a/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala b/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala index 205faf1c..0d18fe9f 100644 --- a/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala +++ b/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala @@ -292,7 +292,7 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { 
visitedFragments1, visitedFragments2) - subfieldConflicts(conflicts, outputName, ast1, ast2) + subfieldConflicts(conflicts.toSeq, outputName, ast1, ast2) } } } @@ -464,4 +464,4 @@ private class PairSet[T] { private def addPair(a: T, b: T, areMutuallyExclusive: Boolean) = data(a → b) = areMutuallyExclusive -} \ No newline at end of file +} From 9f5f31667be055a49ec9b48c9459f4d9a02fa507 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Ferreira?= Date: Wed, 17 Jul 2019 11:31:40 +0100 Subject: [PATCH 02/26] remove unicode arrows --- src/main/scala/sangria/ast/QueryAst.scala | 422 ++-- .../execution/DeprecationTracker.scala | 2 +- .../sangria/execution/ExecutionError.scala | 10 +- .../sangria/execution/ExecutionPath.scala | 18 +- .../sangria/execution/ExecutionScheme.scala | 30 +- .../scala/sangria/execution/Executor.scala | 70 +- .../sangria/execution/FieldCollector.scala | 92 +- .../execution/InputDocumentMaterializer.scala | 24 +- .../sangria/execution/QueryReducer.scala | 38 +- .../execution/QueryReducerExecutor.scala | 80 +- .../scala/sangria/execution/Resolver.scala | 618 +++--- .../sangria/execution/ResultResolver.scala | 74 +- .../SimpleAstBasedExtensionMiddleware.scala | 2 +- .../sangria/execution/TimeMeasurement.scala | 4 +- .../execution/ValueCoercionHelper.scala | 284 +-- .../sangria/execution/ValueCollector.scala | 28 +- .../execution/batch/BatchExecutor.scala | 158 +- .../execution/deferred/DeferredResolver.scala | 4 +- .../sangria/execution/deferred/Fetcher.scala | 124 +- .../FetcherBasedDeferredResolver.scala | 232 +-- .../execution/deferred/FetcherCache.scala | 14 +- .../sangria/execution/deferred/HasId.scala | 4 +- .../scala/sangria/execution/middleware.scala | 30 +- .../introspection/IntrospectionParser.scala | 60 +- .../scala/sangria/introspection/package.scala | 138 +- .../scala/sangria/macros/AstLiftable.scala | 104 +- .../scala/sangria/macros/ParseMacro.scala | 18 +- .../macros/derive/DeriveEnumSetting.scala | 2 +- 
.../macros/derive/DeriveEnumTypeMacro.scala | 102 +- .../derive/DeriveInputObjectSetting.scala | 2 +- .../derive/DeriveInputObjectTypeMacro.scala | 124 +- .../macros/derive/DeriveMacroSupport.scala | 18 +- .../macros/derive/DeriveObjectSetting.scala | 6 +- .../macros/derive/DeriveObjectTypeMacro.scala | 218 +- .../scala/sangria/macros/derive/package.scala | 4 +- src/main/scala/sangria/macros/package.scala | 2 +- .../scala/sangria/marshalling/queryAst.scala | 42 +- .../sangria/parser/PositionTracking.scala | 4 +- .../scala/sangria/parser/QueryParser.scala | 204 +- .../scala/sangria/parser/SourceMapper.scala | 10 +- .../sangria/renderer/QueryRenderer.scala | 182 +- .../sangria/renderer/SchemaRenderer.scala | 136 +- .../sangria/schema/AstSchemaBuilder.scala | 98 +- .../schema/AstSchemaMaterializer.scala | 238 +-- .../sangria/schema/AstSchemaResolver.scala | 56 +- src/main/scala/sangria/schema/Context.scala | 142 +- .../schema/IntrospectionSchemaBuilder.scala | 48 +- .../IntrospectionSchemaMaterializer.scala | 72 +- .../ResolverBasedAstSchemaBuilder.scala | 280 +-- src/main/scala/sangria/schema/Schema.scala | 356 ++-- .../sangria/schema/SchemaComparator.scala | 146 +- .../sangria/schema/SchemaValidationRule.scala | 124 +- src/main/scala/sangria/schema/package.scala | 156 +- src/main/scala/sangria/util/Cache.scala | 18 +- .../sangria/util/ConcurrentHashMapCache.scala | 34 +- src/main/scala/sangria/util/StringUtil.scala | 42 +- .../scala/sangria/util/TrieMapCache.scala | 20 +- .../sangria/validation/DocumentAnalyzer.scala | 18 +- .../sangria/validation/QueryValidator.scala | 72 +- .../SchemaBasedDocumentAnalyzer.scala | 22 +- .../sangria/validation/TypeComparators.scala | 30 +- .../scala/sangria/validation/TypeInfo.scala | 114 +- .../scala/sangria/validation/Violation.scala | 128 +- .../rules/ExecutableDefinitions.scala | 20 +- .../rules/FieldsOnCorrectType.scala | 22 +- .../rules/FragmentsOnCompositeTypes.scala | 12 +- ...umentNonConflictingVariableInference.scala | 12 
+- .../validation/rules/KnownArgumentNames.scala | 18 +- .../validation/rules/KnownDirectives.scala | 78 +- .../validation/rules/KnownFragmentNames.scala | 8 +- .../validation/rules/KnownTypeNames.scala | 6 +- .../rules/LoneAnonymousOperation.scala | 6 +- .../validation/rules/NoFragmentCycles.scala | 18 +- .../rules/NoUndefinedVariables.scala | 16 +- .../validation/rules/NoUnusedFragments.scala | 18 +- .../validation/rules/NoUnusedVariables.scala | 10 +- .../rules/OverlappingFieldsCanBeMerged.scala | 132 +- .../rules/PossibleFragmentSpreads.scala | 36 +- .../rules/ProvidedRequiredArguments.scala | 18 +- .../validation/rules/ScalarLeafs.scala | 14 +- .../rules/SingleFieldSubscriptions.scala | 4 +- .../rules/UniqueArgumentNames.scala | 10 +- .../rules/UniqueDirectivesPerLocation.scala | 10 +- .../rules/UniqueFragmentNames.scala | 6 +- .../rules/UniqueInputFieldNames.scala | 12 +- .../rules/UniqueOperationNames.scala | 6 +- .../rules/UniqueVariableNames.scala | 12 +- .../rules/ValuesOfCorrectType.scala | 88 +- .../rules/VariablesAreInputTypes.scala | 8 +- .../rules/VariablesInAllowedPosition.scala | 18 +- .../sangria/execution/ActionMapSpec.scala | 114 +- .../execution/ContextPassingSpec.scala | 22 +- .../execution/DeprecationTrackerSpec.scala | 36 +- .../sangria/execution/DirectivesSpec.scala | 56 +- .../execution/ExceptionHandlingSpec.scala | 226 +-- .../execution/ExecutorSchemaSpec.scala | 116 +- .../sangria/execution/ExecutorSpec.scala | 434 ++-- .../InputDocumentMaterializerSpec.scala | 24 +- .../scala/sangria/execution/ListsSpec.scala | 152 +- .../sangria/execution/MiddlewareSpec.scala | 230 +-- .../sangria/execution/MutationSpec.scala | 160 +- .../scala/sangria/execution/NotNullSpec.scala | 320 +-- .../sangria/execution/ProjectorSpec.scala | 128 +- .../sangria/execution/QueryReducerSpec.scala | 230 +-- .../sangria/execution/ScalarAliasSpec.scala | 120 +- .../execution/ScalarMiddlewareSpec.scala | 128 +- .../execution/UnionInterfaceSpec.scala | 170 +- 
.../execution/ValueCoercionHelperSpec.scala | 34 +- .../sangria/execution/VariablesSpec.scala | 226 +-- .../execution/batch/BatchExecutorSpec.scala | 40 +- .../deferred/DeferredResolverSpec.scala | 70 +- .../execution/deferred/FetcherSpec.scala | 570 +++--- .../introspection/IntrospectionSpec.scala | 1806 ++++++++--------- .../DeriveInputObjectTypeMacroSpec.scala | 32 +- .../derive/DeriveObjectTypeMacroSpec.scala | 56 +- .../marshalling/EnumInputTypeSpec.scala | 8 +- .../sangria/marshalling/FromInputSpec.scala | 90 +- .../sangria/marshalling/IonSupportSpec.scala | 38 +- .../marshalling/MarshallingUtilSpec.scala | 32 +- .../sangria/parser/QueryParserSpec.scala | 58 +- .../sangria/renderer/QueryRendererSpec.scala | 8 +- .../sangria/renderer/SchemaRenderSpec.scala | 70 +- src/test/scala/sangria/schema/ArgsSpec.scala | 76 +- .../schema/AstSchemaMaterializerSpec.scala | 148 +- .../sangria/schema/CustomScalarSpec.scala | 20 +- .../schema/DefaultValueApplicationSpec.scala | 42 +- .../sangria/schema/DefaultValuesSpec.scala | 104 +- .../scala/sangria/schema/EnumTypeSpec.scala | 54 +- .../IntrospectionSchemaMaterializerSpec.scala | 154 +- .../ResolverBasedAstSchemaBuilderSpec.scala | 176 +- .../sangria/schema/SchemaComparatorSpec.scala | 14 +- .../schema/SchemaConstraintsSpec.scala | 120 +- .../sangria/schema/SchemaDefinitionSpec.scala | 20 +- .../sangria/schema/SchemaExtensionSpec.scala | 112 +- .../schema/TypeFieldConstraintsSpec.scala | 102 +- .../starWars/StarWarsIntrospectionSpec.scala | 282 +-- .../sangria/starWars/StarWarsQuerySpec.scala | 242 +-- .../scala/sangria/starWars/TestData.scala | 12 +- .../scala/sangria/starWars/TestSchema.scala | 24 +- .../scala/sangria/streaming/StreamSpec.scala | 54 +- src/test/scala/sangria/util/CatsSupport.scala | 234 +-- src/test/scala/sangria/util/DebugUtil.scala | 42 +- src/test/scala/sangria/util/FileUtil.scala | 6 +- .../sangria/util/FutureResultSupport.scala | 4 +- .../scala/sangria/util/GraphQlSupport.scala | 32 +- 
.../scala/sangria/util/OutputMatchers.scala | 4 +- .../sangria/util/ValidationSupport.scala | 94 +- .../validation/DocumentAnalyzerSpec.scala | 4 +- .../rules/ExecutableDefinitionsSpec.scala | 4 +- .../rules/FieldsOnCorrectTypeSpec.scala | 22 +- .../rules/FragmentsOnCompositeTypesSpec.scala | 8 +- ...tNonConflictingVariableInferenceSpec.scala | 8 +- .../rules/KnownArgumentNamesSpec.scala | 16 +- .../rules/KnownDirectivesSpec.scala | 44 +- .../rules/KnownFragmentNamesSpec.scala | 6 +- .../validation/rules/KnownTypeNamesSpec.scala | 6 +- .../rules/LoneAnonymousOperationSpec.scala | 8 +- .../rules/NoFragmentCyclesSpec.scala | 30 +- .../rules/NoUndefinedVariablesSpec.scala | 40 +- .../rules/NoUnusedFragmentsSpec.scala | 10 +- .../rules/NoUnusedVariablesSpec.scala | 18 +- .../OverlappingFieldsCanBeMergedSpec.scala | 116 +- .../rules/PossibleFragmentSpreadsSpec.scala | 22 +- .../rules/ProvidedRequiredArgumentsSpec.scala | 12 +- .../validation/rules/ScalarLeafsSpec.scala | 14 +- .../rules/SingleFieldSubscriptionsSpec.scala | 8 +- .../rules/UniqueArgumentNamesSpec.scala | 12 +- .../UniqueDirectivesPerLocationSpec.scala | 14 +- .../rules/UniqueFragmentNamesSpec.scala | 4 +- .../rules/UniqueInputFieldNamesSpec.scala | 6 +- .../rules/UniqueOperationNamesSpec.scala | 6 +- .../rules/UniqueVariableNamesSpec.scala | 8 +- .../rules/ValuesOfCorrectTypeSpec.scala | 94 +- .../rules/VariablesAreInputTypesSpec.scala | 6 +- .../VariablesInAllowedPositionSpec.scala | 52 +- 175 files changed, 7572 insertions(+), 7572 deletions(-) diff --git a/src/main/scala/sangria/ast/QueryAst.scala b/src/main/scala/sangria/ast/QueryAst.scala index 519812ae..1a8730d0 100644 --- a/src/main/scala/sangria/ast/QueryAst.scala +++ b/src/main/scala/sangria/ast/QueryAst.scala @@ -13,8 +13,8 @@ import scala.util.control.Breaks._ import scala.collection.immutable.ListMap case class Document(definitions: Vector[Definition], trailingComments: Vector[Comment] = Vector.empty, location: Option[AstLocation] = None, 
sourceMapper: Option[SourceMapper] = None) extends AstNode with WithTrailingComments { - lazy val operations = Map(definitions collect {case op: OperationDefinition ⇒ op.name → op}: _*) - lazy val fragments = Map(definitions collect {case fragment: FragmentDefinition ⇒ fragment.name → fragment}: _*) + lazy val operations = Map(definitions collect {case op: OperationDefinition => op.name -> op}: _*) + lazy val fragments = Map(definitions collect {case fragment: FragmentDefinition => fragment.name -> fragment}: _*) lazy val source: Option[String] = sourceMapper map (_.source) def operationType(operationName: Option[String] = None): Option[OperationType] = @@ -26,7 +26,7 @@ case class Document(definitions: Vector[Definition], trailingComments: Vector[Co else if(operationName.isEmpty && operations.size == 1) Some(operations.head._2) else - operationName flatMap (opName ⇒ operations get Some(opName)) orElse operations.values.headOption + operationName flatMap (opName => operations get Some(opName)) orElse operations.values.headOption def withoutSourceMapper = copy(sourceMapper = None) @@ -50,14 +50,14 @@ case class Document(definitions: Vector[Definition], trailingComments: Vector[Co def separateOperation(operationName: Option[String]) = analyzer.separateOperation(operationName) override def equals(other: Any): Boolean = other match { - case that: Document ⇒ + case that: Document => (that canEqual this) && definitions == that.definitions && location == that.location - case _ ⇒ false + case _ => false } - private lazy val hash = Seq(definitions, location).map(_.hashCode()).foldLeft(0)((a, b) ⇒ 31 * a + b) + private lazy val hash = Seq(definitions, location).map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b) override def hashCode(): Int = hash } @@ -125,15 +125,15 @@ case class InputDocument(values: Vector[Value], trailingComments: Vector[Comment InputDocumentMaterializer.to(this, inputType, variables) override def equals(other: Any): Boolean = other match { - case that: 
InputDocument ⇒ + case that: InputDocument => (that canEqual this) && values == that.values && location == that.location - case _ ⇒ false + case _ => false } override def hashCode(): Int = - Seq(values, location).map(_.hashCode()).foldLeft(0)((a, b) ⇒ 31 * a + b) + Seq(values, location).map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b) } object InputDocument { @@ -206,9 +206,9 @@ sealed trait Type extends AstNode { def namedType: NamedType = { @annotation.tailrec def loop(tpe: Type): NamedType = tpe match { - case NotNullType(ofType, _) ⇒ loop(ofType) - case ListType(ofType, _) ⇒ loop(ofType) - case named: NamedType ⇒ named + case NotNullType(ofType, _) => loop(ofType) + case ListType(ofType, _) => loop(ofType) + case named: NamedType => named } loop(this) @@ -284,12 +284,12 @@ case class NullValue(comments: Vector[Comment] = Vector.empty, location: Option[ case class ObjectValue(fields: Vector[ObjectField], comments: Vector[Comment] = Vector.empty, location: Option[AstLocation] = None) extends Value { lazy val fieldsByName = fields.foldLeft(ListMap.empty[String, Value]) { - case (acc, field) ⇒ acc + (field.name → field.value) + case (acc, field) => acc + (field.name -> field.value) } } object ObjectValue { - def apply(fields: (String, Value)*): ObjectValue = ObjectValue(fields.toVector map (f ⇒ ObjectField(f._1, f._2))) + def apply(fields: (String, Value)*): ObjectValue = ObjectValue(fields.toVector map (f => ObjectField(f._1, f._2))) } case class ObjectField(name: String, value: Value, comments: Vector[Comment] = Vector.empty, location: Option[AstLocation] = None) extends NameValue @@ -496,13 +496,13 @@ sealed trait AstNode { def visit(visitor: AstVisitor): this.type = AstVisitor.visit(this, visitor) - def visit(onEnter: AstNode ⇒ VisitorCommand, onLeave: AstNode ⇒ VisitorCommand): this.type = + def visit(onEnter: AstNode => VisitorCommand, onLeave: AstNode => VisitorCommand): this.type = AstVisitor.visit(this, onEnter, onLeave) - def 
visitAstWithTypeInfo(schema: Schema[_, _])(visitorFn: TypeInfo ⇒ AstVisitor): this.type = + def visitAstWithTypeInfo(schema: Schema[_, _])(visitorFn: TypeInfo => AstVisitor): this.type = AstVisitor.visitAstWithTypeInfo[this.type](schema, this)(visitorFn) - def visitAstWithState[S](schema: Schema[_, _], state: S)(visitorFn: (TypeInfo, S) ⇒ AstVisitor): S = + def visitAstWithState[S](schema: Schema[_, _], state: S)(visitorFn: (TypeInfo, S) => AstVisitor): S = AstVisitor.visitAstWithState(schema, this, state)(visitorFn) } @@ -526,76 +526,76 @@ sealed trait ObjectLikeTypeExtensionDefinition extends TypeExtensionDefinition { object AstNode { def withoutAstLocations[T <: AstNode](node: T, stripComments: Boolean = false): T = { - val enterComment = (_: Comment) ⇒ if (stripComments) VisitorCommand.Delete else VisitorCommand.Continue + val enterComment = (_: Comment) => if (stripComments) VisitorCommand.Delete else VisitorCommand.Continue visit[AstNode](node, Visit[Comment](enterComment), - VisitAnyField[AstNode, Option[AstLocation]]((_, _) ⇒ VisitorCommand.Transform(None))).asInstanceOf[T] + VisitAnyField[AstNode, Option[AstLocation]]((_, _) => VisitorCommand.Transform(None))).asInstanceOf[T] } } trait AstVisitor { - def onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue} - def onLeave: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue} + def onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ => VisitorCommand.Continue} + def onLeave: PartialFunction[AstNode, VisitorCommand] = {case _ => VisitorCommand.Continue} } case class DefaultAstVisitor( - override val onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue}, - override val onLeave: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue} + override val onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ => VisitorCommand.Continue}, + override val onLeave: PartialFunction[AstNode, 
VisitorCommand] = {case _ => VisitorCommand.Continue} ) extends AstVisitor object AstVisitor { import AstVisitorCommand._ def apply( - onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue}, - onLeave: PartialFunction[AstNode, VisitorCommand] = {case _ ⇒ VisitorCommand.Continue} + onEnter: PartialFunction[AstNode, VisitorCommand] = {case _ => VisitorCommand.Continue}, + onLeave: PartialFunction[AstNode, VisitorCommand] = {case _ => VisitorCommand.Continue} ) = DefaultAstVisitor(onEnter, onLeave) def simple( - onEnter: PartialFunction[AstNode, Unit] = {case _ ⇒ ()}, - onLeave: PartialFunction[AstNode, Unit] = {case _ ⇒ ()} + onEnter: PartialFunction[AstNode, Unit] = {case _ => ()}, + onLeave: PartialFunction[AstNode, Unit] = {case _ => ()} ) = DefaultAstVisitor( { - case node if onEnter.isDefinedAt(node) ⇒ + case node if onEnter.isDefinedAt(node) => onEnter(node) VisitorCommand.Continue }, { - case node if onLeave.isDefinedAt(node) ⇒ + case node if onLeave.isDefinedAt(node) => onLeave(node) VisitorCommand.Continue }) def visit[T <: AstNode](root: T, visitor: AstVisitor): T = visit(root, - node ⇒ if (visitor.onEnter.isDefinedAt(node)) visitor.onEnter(node) else VisitorCommand.Continue, - node ⇒ if (visitor.onLeave.isDefinedAt(node)) visitor.onLeave(node) else VisitorCommand.Continue) + node => if (visitor.onEnter.isDefinedAt(node)) visitor.onEnter(node) else VisitorCommand.Continue, + node => if (visitor.onLeave.isDefinedAt(node)) visitor.onLeave(node) else VisitorCommand.Continue) - def visitAstWithTypeInfo[T <: AstNode](schema: Schema[_, _], root: T)(visitorFn: TypeInfo ⇒ AstVisitor): T = { + def visitAstWithTypeInfo[T <: AstNode](schema: Schema[_, _], root: T)(visitorFn: TypeInfo => AstVisitor): T = { val typeInfo = new TypeInfo(schema) val visitor = visitorFn(typeInfo) visit(root, - node ⇒ { + node => { typeInfo.enter(node) if (visitor.onEnter.isDefinedAt(node)) visitor.onEnter(node) else VisitorCommand.Continue }, - node ⇒ { + 
node => { typeInfo.leave(node) if (visitor.onLeave.isDefinedAt(node)) visitor.onLeave(node) else VisitorCommand.Continue }) } - def visitAstWithState[S](schema: Schema[_, _], root: AstNode, state: S)(visitorFn: (TypeInfo, S) ⇒ AstVisitor): S = { + def visitAstWithState[S](schema: Schema[_, _], root: AstNode, state: S)(visitorFn: (TypeInfo, S) => AstVisitor): S = { val typeInfo = new TypeInfo(schema) val visitor = visitorFn(typeInfo, state) visit(root, - node ⇒ { + node => { typeInfo.enter(node) if (visitor.onEnter.isDefinedAt(node)) visitor.onEnter(node) else VisitorCommand.Continue }, - node ⇒ { + node => { typeInfo.leave(node) if (visitor.onLeave.isDefinedAt(node)) visitor.onLeave(node) else VisitorCommand.Continue }) @@ -605,338 +605,338 @@ object AstVisitor { def visit[T <: AstNode]( root: T, - onEnter: AstNode ⇒ VisitorCommand, - onLeave: AstNode ⇒ VisitorCommand): T = + onEnter: AstNode => VisitorCommand, + onLeave: AstNode => VisitorCommand): T = sangria.visitor.visit[AstNode](root, Visit[AstNode](onEnter, onLeave)).asInstanceOf[T] private[sangria] def visitAstRecursive( doc: AstNode, - onEnter: AstNode ⇒ AstVisitorCommand.Value = _ ⇒ Continue, - onLeave: AstNode ⇒ AstVisitorCommand.Value = _ ⇒ Continue): Unit = { + onEnter: AstNode => AstVisitorCommand.Value = _ => Continue, + onLeave: AstNode => AstVisitorCommand.Value = _ => Continue): Unit = { def breakOrSkip(cmd: AstVisitorCommand.Value) = cmd match { - case Break ⇒ break() - case Skip ⇒ false - case Continue ⇒ true + case Break => break() + case Skip => false + case Continue => true } def loop(node: AstNode): Unit = node match { - case n @ Document(defs, trailingComments, _, _) ⇒ + case n @ Document(defs, trailingComments, _, _) => if (breakOrSkip(onEnter(n))) { - defs.foreach(d ⇒ loop(d)) - trailingComments.foreach(s ⇒ loop(s)) + defs.foreach(d => loop(d)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InputDocument(defs, trailingComments, _, _) ⇒ + case n @ 
InputDocument(defs, trailingComments, _, _) => if (breakOrSkip(onEnter(n))) { - defs.foreach(d ⇒ loop(d)) - trailingComments.foreach(s ⇒ loop(s)) + defs.foreach(d => loop(d)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ FragmentDefinition(_, cond, dirs, sels, vars, comment, trailingComments, _) ⇒ + case n @ FragmentDefinition(_, cond, dirs, sels, vars, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { loop(cond) - dirs.foreach(d ⇒ loop(d)) - sels.foreach(s ⇒ loop(s)) - vars.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + dirs.foreach(d => loop(d)) + sels.foreach(s => loop(s)) + vars.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ OperationDefinition(_, _, vars, dirs, sels, comment, trailingComments, _) ⇒ + case n @ OperationDefinition(_, _, vars, dirs, sels, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - vars.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - sels.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + vars.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + sels.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ VariableDefinition(_, tpe, default, dirs, comment, _) ⇒ + case n @ VariableDefinition(_, tpe, default, dirs, comment, _) => if (breakOrSkip(onEnter(n))) { loop(tpe) - default.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) + default.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InlineFragment(cond, dirs, sels, comment, trailingComments, _) ⇒ + case n @ InlineFragment(cond, dirs, sels, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - cond.foreach(c ⇒ loop(c)) - dirs.foreach(d ⇒ loop(d)) - sels.foreach(s ⇒ 
loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + cond.foreach(c => loop(c)) + dirs.foreach(d => loop(d)) + sels.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ FragmentSpread(_, dirs, comment, _) ⇒ + case n @ FragmentSpread(_, dirs, comment, _) => if (breakOrSkip(onEnter(n))) { - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ NotNullType(ofType, _) ⇒ + case n @ NotNullType(ofType, _) => if (breakOrSkip(onEnter(n))) { loop(ofType) breakOrSkip(onLeave(n)) } - case n @ ListType(ofType, _) ⇒ + case n @ ListType(ofType, _) => if (breakOrSkip(onEnter(n))) { loop(ofType) breakOrSkip(onLeave(n)) } - case n @ Field(_, _, args, dirs, sels, comment, trailingComments, _) ⇒ + case n @ Field(_, _, args, dirs, sels, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - args.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - sels.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + args.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + sels.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ Argument(_, v, comment, _) ⇒ + case n @ Argument(_, v, comment, _) => if (breakOrSkip(onEnter(n))) { loop(v) - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ ObjectField(_, v, comment, _) ⇒ + case n @ ObjectField(_, v, comment, _) => if (breakOrSkip(onEnter(n))) { loop(v) - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ Directive(_, args, comment, _) ⇒ + case n @ Directive(_, args, comment, _) => if (breakOrSkip(onEnter(n))) { - args.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) + args.foreach(d => loop(d)) + comment.foreach(s => 
loop(s)) breakOrSkip(onLeave(n)) } - case n @ ListValue(vals, comment, _) ⇒ + case n @ ListValue(vals, comment, _) => if (breakOrSkip(onEnter(n))) { - vals.foreach(v ⇒ loop(v)) - comment.foreach(s ⇒ loop(s)) + vals.foreach(v => loop(v)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ ObjectValue(fields, comment, _) ⇒ + case n @ ObjectValue(fields, comment, _) => if (breakOrSkip(onEnter(n))) { - fields.foreach(f ⇒ loop(f)) - comment.foreach(s ⇒ loop(s)) + fields.foreach(f => loop(f)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ BigDecimalValue(_, comment, _) ⇒ + case n @ BigDecimalValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ BooleanValue(_, comment, _) ⇒ + case n @ BooleanValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ Comment(_, _) ⇒ + case n @ Comment(_, _) => if (breakOrSkip(onEnter(n))) { breakOrSkip(onLeave(n)) } - case n @ VariableValue(_, comment, _) ⇒ + case n @ VariableValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ EnumValue(_, comment, _) ⇒ + case n @ EnumValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ NullValue(comment, _) ⇒ + case n @ NullValue(comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ NamedType(_, _) ⇒ + case n @ NamedType(_, _) => if (breakOrSkip(onEnter(n))) { breakOrSkip(onLeave(n)) } - case n @ StringValue(_, _, _, comment, _) ⇒ + case n @ StringValue(_, _, _, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) 
breakOrSkip(onLeave(n)) } - case n @ BigIntValue(_, comment, _) ⇒ + case n @ BigIntValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ IntValue(_, comment, _) ⇒ + case n @ IntValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ FloatValue(_, comment, _) ⇒ + case n @ FloatValue(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } // SDL schema definition - case n @ ScalarTypeDefinition(_, dirs, description, comment, _) ⇒ + case n @ ScalarTypeDefinition(_, dirs, description, comment, _) => if (breakOrSkip(onEnter(n))) { - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ FieldDefinition(_, fieldType, args, dirs, description, comment, _) ⇒ + case n @ FieldDefinition(_, fieldType, args, dirs, description, comment, _) => if (breakOrSkip(onEnter(n))) { loop(fieldType) - args.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + args.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InputValueDefinition(_, valueType, default, dirs, description, comment, _) ⇒ + case n @ InputValueDefinition(_, valueType, default, dirs, description, comment, _) => if (breakOrSkip(onEnter(n))) { loop(valueType) - default.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + default.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) 
breakOrSkip(onLeave(n)) } - case n @ ObjectTypeDefinition(_, interfaces, fields, dirs, description, comment, trailingComments, _) ⇒ + case n @ ObjectTypeDefinition(_, interfaces, fields, dirs, description, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - interfaces.foreach(d ⇒ loop(d)) - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + interfaces.foreach(d => loop(d)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InterfaceTypeDefinition(_, fields, dirs, description, comment, trailingComments, _) ⇒ + case n @ InterfaceTypeDefinition(_, fields, dirs, description, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ UnionTypeDefinition(_, types, dirs, description, comment, _) ⇒ + case n @ UnionTypeDefinition(_, types, dirs, description, comment, _) => if (breakOrSkip(onEnter(n))) { - types.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + types.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ EnumTypeDefinition(_, values, dirs, description, comment, trailingComments, _) ⇒ + case n @ EnumTypeDefinition(_, values, dirs, description, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - values.foreach(d ⇒ loop(d)) - 
dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + values.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ EnumValueDefinition(_, dirs, description, comment, _) ⇒ + case n @ EnumValueDefinition(_, dirs, description, comment, _) => if (breakOrSkip(onEnter(n))) { - dirs.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + dirs.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InputObjectTypeDefinition(_, fields, dirs, description, comment, trailingComments, _) ⇒ + case n @ InputObjectTypeDefinition(_, fields, dirs, description, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ ObjectTypeExtensionDefinition(_, ints, fields, dirs, comment, tc, _) ⇒ + case n @ ObjectTypeExtensionDefinition(_, ints, fields, dirs, comment, tc, _) => if (breakOrSkip(onEnter(n))) { - ints.foreach(d ⇒ loop(d)) - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) - tc.foreach(s ⇒ loop(s)) + ints.foreach(d => loop(d)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + tc.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InterfaceTypeExtensionDefinition(_, fields, dirs, comment, tc, _) ⇒ + case n @ InterfaceTypeExtensionDefinition(_, fields, dirs, comment, tc, _) => if (breakOrSkip(onEnter(n))) { - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - 
comment.foreach(s ⇒ loop(s)) - tc.foreach(s ⇒ loop(s)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + tc.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ InputObjectTypeExtensionDefinition(_, fields, dirs, comment, tc, _) ⇒ + case n @ InputObjectTypeExtensionDefinition(_, fields, dirs, comment, tc, _) => if (breakOrSkip(onEnter(n))) { - fields.foreach(d ⇒ loop(d)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) - tc.foreach(s ⇒ loop(s)) + fields.foreach(d => loop(d)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + tc.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ UnionTypeExtensionDefinition(_, types, dirs, comment, _) ⇒ + case n @ UnionTypeExtensionDefinition(_, types, dirs, comment, _) => if (breakOrSkip(onEnter(n))) { - types.foreach(t ⇒ loop(t)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) + types.foreach(t => loop(t)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ EnumTypeExtensionDefinition(_, values, dirs, comment, tc, _) ⇒ + case n @ EnumTypeExtensionDefinition(_, values, dirs, comment, tc, _) => if (breakOrSkip(onEnter(n))) { - values.foreach(t ⇒ loop(t)) - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) - tc.foreach(s ⇒ loop(s)) + values.foreach(t => loop(t)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + tc.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ ScalarTypeExtensionDefinition(_, dirs, comment, _) ⇒ + case n @ ScalarTypeExtensionDefinition(_, dirs, comment, _) => if (breakOrSkip(onEnter(n))) { - dirs.foreach(d ⇒ loop(d)) - comment.foreach(s ⇒ loop(s)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ SchemaExtensionDefinition(ops, dirs, comment, tc, _) ⇒ + case n @ SchemaExtensionDefinition(ops, dirs, comment, tc, _) => if (breakOrSkip(onEnter(n))) { - ops.foreach(op ⇒ loop(op)) - dirs.foreach(d ⇒ loop(d)) - 
comment.foreach(s ⇒ loop(s)) - tc.foreach(c ⇒ loop(c)) + ops.foreach(op => loop(op)) + dirs.foreach(d => loop(d)) + comment.foreach(s => loop(s)) + tc.foreach(c => loop(c)) breakOrSkip(onLeave(n)) } - case n @ DirectiveDefinition(_, args, locations, description, comment, _) ⇒ + case n @ DirectiveDefinition(_, args, locations, description, comment, _) => if (breakOrSkip(onEnter(n))) { - args.foreach(d ⇒ loop(d)) - locations.foreach(d ⇒ loop(d)) - description.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) + args.foreach(d => loop(d)) + locations.foreach(d => loop(d)) + description.foreach(s => loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ DirectiveLocation(_, comment, _) ⇒ + case n @ DirectiveLocation(_, comment, _) => if (breakOrSkip(onEnter(n))) { - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ SchemaDefinition(ops, dirs, descr, comment, trailingComments, _) ⇒ + case n @ SchemaDefinition(ops, dirs, descr, comment, trailingComments, _) => if (breakOrSkip(onEnter(n))) { - ops.foreach(s ⇒ loop(s)) - dirs.foreach(s ⇒ loop(s)) - descr.foreach(s ⇒ loop(s)) - comment.foreach(s ⇒ loop(s)) - trailingComments.foreach(s ⇒ loop(s)) + ops.foreach(s => loop(s)) + dirs.foreach(s => loop(s)) + descr.foreach(s => loop(s)) + comment.foreach(s => loop(s)) + trailingComments.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } - case n @ OperationTypeDefinition(_, tpe, comment, _) ⇒ + case n @ OperationTypeDefinition(_, tpe, comment, _) => if (breakOrSkip(onEnter(n))) { loop(tpe) - comment.foreach(s ⇒ loop(s)) + comment.foreach(s => loop(s)) breakOrSkip(onLeave(n)) } } diff --git a/src/main/scala/sangria/execution/DeprecationTracker.scala b/src/main/scala/sangria/execution/DeprecationTracker.scala index f0c27634..8cc50961 100644 --- a/src/main/scala/sangria/execution/DeprecationTracker.scala +++ b/src/main/scala/sangria/execution/DeprecationTracker.scala @@ -17,7 +17,7 @@ object NilDeprecationTracker 
extends DeprecationTracker { def deprecatedEnumValueUsed[T, Ctx](enum: EnumType[T], value: T, userContext: Ctx) = () } -class LoggingDeprecationTracker(logFn: String ⇒ Unit) extends DeprecationTracker { +class LoggingDeprecationTracker(logFn: String => Unit) extends DeprecationTracker { def deprecatedFieldUsed[Ctx](ctx: Context[Ctx, _]) = logFn(s"Deprecated field '${ctx.parentType.name}.${ctx.field.name}' used at path '${ctx.path}'.") diff --git a/src/main/scala/sangria/execution/ExecutionError.scala b/src/main/scala/sangria/execution/ExecutionError.scala index 48019e4a..63433389 100644 --- a/src/main/scala/sangria/execution/ExecutionError.scala +++ b/src/main/scala/sangria/execution/ExecutionError.scala @@ -15,7 +15,7 @@ trait WithViolations extends UserFacingError{ } trait ErrorWithResolver { - this: Throwable ⇒ + this: Throwable => def exceptionHandler: ExceptionHandler @@ -38,13 +38,13 @@ case class UndefinedConcreteTypeError(path: ExecutionPath, abstractType: Abstrac object UndefinedConcreteTypeError { private def renderAbstractType(abstractType: AbstractType) = abstractType match { - case _: UnionType[_] ⇒ "a union" - case _: InterfaceType[_, _] ⇒ "an interface" + case _: UnionType[_] => "a union" + case _: InterfaceType[_, _] => "an interface" } private def renderPossibleTypes(possibleTypes: Vector[ObjectType[_, _]]) = if (possibleTypes.isEmpty) "none" - else possibleTypes.map(pt ⇒ s"${pt.name} (defined for '${pt.valClass.getName}')") mkString ", " + else possibleTypes.map(pt => s"${pt.name} (defined for '${pt.valClass.getName}')") mkString ", " private def renderValueClass(value: Any) = value.getClass.getName } @@ -54,7 +54,7 @@ case class MaxQueryDepthReachedError(maxDepth: Int) extends Exception(s"Max quer case object IntrospectionNotAllowedError extends Exception(s"Introspection is not allowed.") with UserFacingError trait QueryAnalysisError extends ErrorWithResolver { - this: Throwable ⇒ + this: Throwable => } case class 
VariableCoercionError(violations: Vector[Violation], eh: ExceptionHandler) extends ExecutionError( diff --git a/src/main/scala/sangria/execution/ExecutionPath.scala b/src/main/scala/sangria/execution/ExecutionPath.scala index c326f610..50bc83be 100644 --- a/src/main/scala/sangria/execution/ExecutionPath.scala +++ b/src/main/scala/sangria/execution/ExecutionPath.scala @@ -16,7 +16,7 @@ case class ExecutionPath private (path: Vector[Any], cacheKeyPath: ExecutionPath /** * @return last index in the path, if available */ - def lastIndex: Option[Int] = path.lastOption.collect {case i: Int ⇒ i} + def lastIndex: Option[Int] = path.lastOption.collect {case i: Int => i} /** * @return the size of the path excluding the indexes @@ -24,19 +24,19 @@ case class ExecutionPath private (path: Vector[Any], cacheKeyPath: ExecutionPath def size = cacheKeyPath.size / 2 def marshal(m: ResultMarshaller): m.Node = m.arrayNode(path.map { - case s: String ⇒ m.scalarNode(s, "String", Set.empty) - case i: Int ⇒ m.scalarNode(i, "Int", Set.empty) + case s: String => m.scalarNode(s, "String", Set.empty) + case i: Int => m.scalarNode(i, "Int", Set.empty) }) def cacheKey: ExecutionPath.PathCacheKey = cacheKeyPath override def toString = path.foldLeft("") { - case ("", str: String) ⇒ str - case (acc, str: String) ⇒ acc + "." + str - case (acc, idx: Int) ⇒ acc + "[" + idx + "]" + case ("", str: String) => str + case (acc, str: String) => acc + "." + str + case (acc, idx: Int) => acc + "[" + idx + "]" - case ("", other) ⇒ other.toString - case (acc, other) ⇒ acc + "." + other.toString + case ("", other) => other.toString + case (acc, other) => acc + "." 
+ other.toString } } @@ -44,4 +44,4 @@ object ExecutionPath { type PathCacheKey = Vector[String] val empty = new ExecutionPath(Vector.empty, Vector.empty) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/execution/ExecutionScheme.scala b/src/main/scala/sangria/execution/ExecutionScheme.scala index 659c7ba9..8c7aa727 100644 --- a/src/main/scala/sangria/execution/ExecutionScheme.scala +++ b/src/main/scala/sangria/execution/ExecutionScheme.scala @@ -9,8 +9,8 @@ sealed trait ExecutionScheme { type Result[Ctx, Res] def failed[Ctx, Res](error: Throwable): Result[Ctx, Res] - def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: ⇒ Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] - def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T ⇒ Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] + def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: => Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] + def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] def extended: Boolean } @@ -21,12 +21,12 @@ object ExecutionScheme extends AlternativeExecutionScheme { def failed[Ctx, Res](error: Throwable): Result[Ctx, Res] = Future.failed(error) - def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: ⇒ Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = + def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: => Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = result - .map {x ⇒ op; x} - .recover {case e ⇒ op; throw e} + .map {x => op; x} + .recover {case e => op; throw e} - def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T ⇒ Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = + def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = future flatMap resultFn def extended = false @@ -44,12 +44,12 @@ trait AlternativeExecutionScheme { def 
failed[Ctx, Res](error: Throwable): Result[Ctx, Res] = Future.failed(error) - def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: ⇒ Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = + def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: => Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = result - .map { x ⇒ op; x} - .recover { case e ⇒ op; throw e} + .map { x => op; x} + .recover { case e => op; throw e} - def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T ⇒ Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = + def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = future flatMap resultFn def extended = true @@ -65,10 +65,10 @@ trait AlternativeExecutionScheme { def failed[Ctx, Res](error: Throwable): Result[Ctx, Res] = stream.failed(error) - def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: ⇒ Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = + def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: => Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = stream.onComplete(result)(op) - def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T ⇒ Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = + def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = stream.flatMapFuture(future)(resultFn) } @@ -82,10 +82,10 @@ trait AlternativeExecutionScheme { def failed[Ctx, Res](error: Throwable): Result[Ctx, Res] = stream.failed(error) - def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: ⇒ Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = + def onComplete[Ctx, Res](result: Result[Ctx, Res])(op: => Unit)(implicit ec: ExecutionContext): Result[Ctx, Res] = stream.onComplete(result)(op) - def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T ⇒ Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = + def 
flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => Result[Ctx, Res])(implicit ec: ExecutionContext): Result[Ctx, Res] = stream.flatMapFuture(future)(resultFn) } } @@ -96,4 +96,4 @@ case class ExecutionResult[Ctx, Res]( errors: Vector[RegisteredError], middlewareVals: List[(Any, Middleware[_])], validationTiming: TimeMeasurement, - queryReducerTiming: TimeMeasurement) \ No newline at end of file + queryReducerTiming: TimeMeasurement) diff --git a/src/main/scala/sangria/execution/Executor.scala b/src/main/scala/sangria/execution/Executor.scala index 8c9ca108..7668e453 100644 --- a/src/main/scala/sangria/execution/Executor.scala +++ b/src/main/scala/sangria/execution/Executor.scala @@ -37,34 +37,34 @@ case class Executor[Ctx, Root]( val valueCollector = new ValueCollector[Ctx, Input](schema, variables, queryAst.sourceMapper, deprecationTracker, userContext, exceptionHandler, scalarMiddleware, false)(um) val executionResult = for { - operation ← Executor.getOperation(exceptionHandler,queryAst, operationName) - unmarshalledVariables ← valueCollector.getVariableValues(operation.variables, scalarMiddleware) + operation <- Executor.getOperation(exceptionHandler,queryAst, operationName) + unmarshalledVariables <- valueCollector.getVariableValues(operation.variables, scalarMiddleware) fieldCollector = new FieldCollector[Ctx, Root](schema, queryAst, unmarshalledVariables, queryAst.sourceMapper, valueCollector, exceptionHandler) - tpe ← Executor.getOperationRootType(schema, exceptionHandler, operation, queryAst.sourceMapper) - fields ← fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) + tpe <- Executor.getOperationRootType(schema, exceptionHandler, operation, queryAst.sourceMapper) + fields <- fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) } yield { val preparedFields = fields.fields.flatMap { - case CollectedField(_, astField, Success(_)) ⇒ + case CollectedField(_, astField, Success(_)) => val allFields = 
tpe.getField(schema, astField.name).asInstanceOf[Vector[Field[Ctx, Root]]] val field = allFields.head val args = valueCollector.getFieldArgumentValues(ExecutionPath.empty.add(astField, tpe), Some(astField), field.arguments, astField.arguments, unmarshalledVariables) args.toOption.map(PreparedField(field, _)) - case _ ⇒ None + case _ => None } QueryReducerExecutor.reduceQuery(schema, queryReducers, exceptionHandler, fieldCollector, valueCollector, unmarshalledVariables, tpe, fields, userContext).map { - case (newCtx, timing) ⇒ + case (newCtx, timing) => new PreparedQuery[Ctx, Root, Input](queryAst, operation, tpe, newCtx, root, preparedFields, - (c: Ctx, r: Root, m: ResultMarshaller, scheme: ExecutionScheme) ⇒ + (c: Ctx, r: Root, m: ResultMarshaller, scheme: ExecutionScheme) => executeOperation(queryAst, operationName, variables, um, operation, queryAst.sourceMapper, valueCollector, fieldCollector, m, unmarshalledVariables, tpe, fields, c, r, scheme, validationTiming, timing)) } } executionResult match { - case Success(future) ⇒ future - case Failure(error) ⇒ Future.failed(error) + case Success(future) => future + case Failure(error) => Future.failed(error) } } } @@ -85,22 +85,22 @@ case class Executor[Ctx, Root]( val valueCollector = new ValueCollector[Ctx, Input](schema, variables, queryAst.sourceMapper, deprecationTracker, userContext, exceptionHandler, scalarMiddleware, false)(um) val executionResult = for { - operation ← Executor.getOperation(exceptionHandler, queryAst, operationName) - unmarshalledVariables ← valueCollector.getVariableValues(operation.variables, scalarMiddleware) + operation <- Executor.getOperation(exceptionHandler, queryAst, operationName) + unmarshalledVariables <- valueCollector.getVariableValues(operation.variables, scalarMiddleware) fieldCollector = new FieldCollector[Ctx, Root](schema, queryAst, unmarshalledVariables, queryAst.sourceMapper, valueCollector, exceptionHandler) - tpe ← Executor.getOperationRootType(schema, exceptionHandler, 
operation, queryAst.sourceMapper) - fields ← fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) + tpe <- Executor.getOperationRootType(schema, exceptionHandler, operation, queryAst.sourceMapper) + fields <- fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) } yield { val reduced = QueryReducerExecutor.reduceQuery(schema, queryReducers, exceptionHandler, fieldCollector, valueCollector, unmarshalledVariables, tpe, fields, userContext) - scheme.flatMapFuture(reduced){ case (newCtx, timing) ⇒ + scheme.flatMapFuture(reduced){ case (newCtx, timing) => executeOperation(queryAst, operationName, variables, um, operation, queryAst.sourceMapper, valueCollector, fieldCollector, marshaller, unmarshalledVariables, tpe, fields, newCtx, root, scheme, validationTiming, timing) } } executionResult match { - case Success(result) ⇒ result - case Failure(error) ⇒ scheme.failed(error) + case Success(result) => result + case Failure(error) => scheme.failed(error) } } } @@ -127,7 +127,7 @@ case class Executor[Ctx, Root]( val middlewareCtx = MiddlewareQueryContext(ctx, this, queryAst, operationName, inputVariables, inputUnmarshaller, validationTiming, queryReducerTiming) try { - val middlewareVal = middleware map (m ⇒ m.beforeQuery(middlewareCtx) → m) + val middlewareVal = middleware map (m => m.beforeQuery(middlewareCtx) -> m) val deferredResolverState = deferredResolver.initialQueryState val resolver = new Resolver[Ctx]( @@ -152,14 +152,14 @@ case class Executor[Ctx, Root]( val result = operation.operationType match { - case ast.OperationType.Query ⇒ resolver.resolveFieldsPar(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case ast.OperationType.Mutation ⇒ resolver.resolveFieldsSeq(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case ast.OperationType.Subscription ⇒ - tpe.uniqueFields.head.tags.collectFirst{case SubscriptionField(s) ⇒ s} match { - case Some(stream) ⇒ + case 
ast.OperationType.Query => resolver.resolveFieldsPar(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] + case ast.OperationType.Mutation => resolver.resolveFieldsSeq(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] + case ast.OperationType.Subscription => + tpe.uniqueFields.head.tags.collectFirst{case SubscriptionField(s) => s} match { + case Some(stream) => // Streaming is supported - resolve as a real subscription resolver.resolveFieldsSubs(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case None ⇒ + case None => // No streaming is supported - resolve as a normal "query" operation resolver.resolveFieldsPar(tpe, root, fields)(scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] } @@ -168,10 +168,10 @@ case class Executor[Ctx, Root]( if (middlewareVal.nonEmpty) scheme.onComplete(result)( - middlewareVal foreach { case (v, m) ⇒ m.afterQuery(v.asInstanceOf[m.QueryVal], middlewareCtx)}) + middlewareVal foreach { case (v, m) => m.afterQuery(v.asInstanceOf[m.QueryVal], middlewareCtx)}) else result } catch { - case NonFatal(error) ⇒ + case NonFatal(error) => scheme.failed(error) } } @@ -217,12 +217,12 @@ object Executor { .prepare(queryAst, userContext, root, operationName, variables) def getOperationRootType[Ctx, Root](schema: Schema[Ctx, Root], exceptionHandler: ExceptionHandler, operation: ast.OperationDefinition, sourceMapper: Option[SourceMapper]) = operation.operationType match { - case ast.OperationType.Query ⇒ + case ast.OperationType.Query => Success(schema.query) - case ast.OperationType.Mutation ⇒ + case ast.OperationType.Mutation => schema.mutation map (Success(_)) getOrElse Failure(OperationSelectionError("Schema is not configured for mutations", exceptionHandler, sourceMapper, operation.location.toList)) - case ast.OperationType.Subscription ⇒ + case ast.OperationType.Subscription => schema.subscription map (Success(_)) getOrElse 
Failure(OperationSelectionError("Schema is not configured for subscriptions", exceptionHandler, sourceMapper, operation.location.toList)) } @@ -231,17 +231,17 @@ object Executor { if (document.operations.size != 1 && operationName.isEmpty) Failure(OperationSelectionError("Must provide operation name if query contains multiple operations", exceptionHandler)) else { - val unexpectedDefinition = document.definitions.find(d ⇒ !(d.isInstanceOf[ast.OperationDefinition] || d.isInstanceOf[ast.FragmentDefinition])) + val unexpectedDefinition = document.definitions.find(d => !(d.isInstanceOf[ast.OperationDefinition] || d.isInstanceOf[ast.FragmentDefinition])) unexpectedDefinition match { - case Some(unexpected) ⇒ + case Some(unexpected) => Failure(new ExecutionError(s"GraphQL cannot execute a request containing a ${unexpected.getClass.getSimpleName}.", exceptionHandler)) - case None ⇒ + case None => operationName match { - case Some(opName) ⇒ + case Some(opName) => document.operations get Some(opName) map (Success(_)) getOrElse Failure(OperationSelectionError(s"Unknown operation name '$opName'", exceptionHandler)) - case None ⇒ + case None => Success(document.operations.values.head) } } @@ -255,7 +255,7 @@ class PreparedQuery[Ctx, Root, Input] private[execution] ( val userContext: Ctx, val root: Root, val fields: Seq[PreparedField[Ctx, Root]], - execFn: (Ctx, Root, ResultMarshaller, ExecutionScheme) ⇒ Any) { + execFn: (Ctx, Root, ResultMarshaller, ExecutionScheme) => Any) { def execute(userContext: Ctx = userContext, root: Root = root)(implicit marshaller: ResultMarshaller, scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = execFn(userContext, root, marshaller, scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] } diff --git a/src/main/scala/sangria/execution/FieldCollector.scala b/src/main/scala/sangria/execution/FieldCollector.scala index 1b18c827..9f844429 100644 --- a/src/main/scala/sangria/execution/FieldCollector.scala +++ 
b/src/main/scala/sangria/execution/FieldCollector.scala @@ -20,11 +20,11 @@ class FieldCollector[Ctx, Val]( private val resultCache = Cache.empty[(ExecutionPath.PathCacheKey, String), Try[CollectedFields]] def collectFields(path: ExecutionPath, tpe: ObjectType[Ctx, _], selections: Vector[ast.SelectionContainer]): Try[CollectedFields] = - resultCache.getOrElseUpdate(path.cacheKey → tpe.name, { + resultCache.getOrElseUpdate(path.cacheKey -> tpe.name, { val builder: Try[CollectedFieldsBuilder] = Success(new CollectedFieldsBuilder) selections.foldLeft(builder) { - case (acc, s) ⇒ collectFieldsInternal(tpe, s.selections, MutableSet.empty, acc) + case (acc, s) => collectFieldsInternal(tpe, s.selections, MutableSet.empty, acc) } builder map (_.build) @@ -32,48 +32,48 @@ class FieldCollector[Ctx, Val]( private def collectFieldsInternal(tpe: ObjectType[Ctx, _], selections: Vector[ast.Selection], visitedFragments: MutableSet[String], initial: Try[CollectedFieldsBuilder]): Try[CollectedFieldsBuilder] = selections.foldLeft(initial) { - case (f @ Failure(_), selection) ⇒ f - case (s @ Success(acc), selection) ⇒ + case (f @ Failure(_), selection) => f + case (s @ Success(acc), selection) => selection match { - case field @ ast.Field(_, _, _, dirs, _, _, _, _) ⇒ + case field @ ast.Field(_, _, _, dirs, _, _, _, _) => val name = field.outputName shouldIncludeNode(dirs, selection) match { - case Success(true) ⇒ + case Success(true) => acc.add(name, field) s - case Success(false) ⇒ s - case Failure(error) ⇒ + case Success(false) => s + case Failure(error) => acc.addError(name, field, error) s } - case fragment @ ast.InlineFragment(_, dirs, fragmentSelections, _, _, _) ⇒ + case fragment @ ast.InlineFragment(_, dirs, fragmentSelections, _, _, _) => for { - shouldInclude ← shouldIncludeNode(dirs, selection) - fragmentConditionMatch ← doesFragmentConditionMatch(tpe, fragment) - fragmentFields ← + shouldInclude <- shouldIncludeNode(dirs, selection) + fragmentConditionMatch <- 
doesFragmentConditionMatch(tpe, fragment) + fragmentFields <- if (shouldInclude && fragmentConditionMatch) collectFieldsInternal(tpe, fragmentSelections, visitedFragments, s) else s } yield fragmentFields - case ast.FragmentSpread(name, _, _, _) if visitedFragments contains name ⇒ s - case ast.FragmentSpread(name, dirs, _, position) ⇒ - shouldIncludeNode(dirs, selection) flatMap { shouldInclude ⇒ + case ast.FragmentSpread(name, _, _, _) if visitedFragments contains name => s + case ast.FragmentSpread(name, dirs, _, position) => + shouldIncludeNode(dirs, selection) flatMap { shouldInclude => if (shouldInclude) { visitedFragments += name document.fragments.get(name) match { - case Some(fragment) ⇒ + case Some(fragment) => for { - shouldInclude ← shouldIncludeNode(fragment.directives, fragment) - fragmentConditionMatch ← doesFragmentConditionMatch(tpe, fragment) - fragmentFields ← + shouldInclude <- shouldIncludeNode(fragment.directives, fragment) + fragmentConditionMatch <- doesFragmentConditionMatch(tpe, fragment) + fragmentFields <- if (shouldInclude && fragmentConditionMatch) collectFieldsInternal(tpe, fragment.selections, visitedFragments, s) else s } yield fragmentFields - case None ⇒ + case None => Failure(new ExecutionError(s"Fragment with name '$name' is not defined", exceptionHandler, sourceMapper, position.toList)) } } else s @@ -83,31 +83,31 @@ class FieldCollector[Ctx, Val]( def shouldIncludeNode(directives: Vector[ast.Directive], selection: ast.WithDirectives): Try[Boolean] = { val possibleDirs = directives - .map(d ⇒ schema.directivesByName + .map(d => schema.directivesByName .get(d.name) - .map(dd ⇒ selection match { - case _: ast.Field if !dd.locations.contains(DirectiveLocation.Field) ⇒ + .map(dd => selection match { + case _: ast.Field if !dd.locations.contains(DirectiveLocation.Field) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on fields", exceptionHandler, sourceMapper, d.location.toList)) - case _: 
ast.InlineFragment if !dd.locations.contains(DirectiveLocation.InlineFragment) ⇒ + case _: ast.InlineFragment if !dd.locations.contains(DirectiveLocation.InlineFragment) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on inline fragment", exceptionHandler, sourceMapper, d.location.toList)) - case _: ast.FragmentSpread if !dd.locations.contains(DirectiveLocation.FragmentSpread) ⇒ + case _: ast.FragmentSpread if !dd.locations.contains(DirectiveLocation.FragmentSpread) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on fragment spread", exceptionHandler, sourceMapper, d.location.toList)) - case _: ast.FragmentDefinition if !dd.locations.contains(DirectiveLocation.FragmentDefinition) ⇒ + case _: ast.FragmentDefinition if !dd.locations.contains(DirectiveLocation.FragmentDefinition) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on fragment definition", exceptionHandler, sourceMapper, d.location.toList)) - case op: ast.OperationDefinition if op.operationType == OperationType.Query && !dd.locations.contains(DirectiveLocation.Query) ⇒ + case op: ast.OperationDefinition if op.operationType == OperationType.Query && !dd.locations.contains(DirectiveLocation.Query) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on query operation", exceptionHandler, sourceMapper, d.location.toList)) - case op: ast.OperationDefinition if op.operationType == OperationType.Mutation && !dd.locations.contains(DirectiveLocation.Mutation) ⇒ + case op: ast.OperationDefinition if op.operationType == OperationType.Mutation && !dd.locations.contains(DirectiveLocation.Mutation) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on mutation operation", exceptionHandler, sourceMapper, d.location.toList)) - case op: ast.OperationDefinition if op.operationType == OperationType.Subscription && !dd.locations.contains(DirectiveLocation.Subscription) ⇒ 
+ case op: ast.OperationDefinition if op.operationType == OperationType.Subscription && !dd.locations.contains(DirectiveLocation.Subscription) => Failure(new ExecutionError(s"Directive '${dd.name}' is not allowed to be used on subscription operation", exceptionHandler, sourceMapper, d.location.toList)) - case _ ⇒ Success(d → dd) + case _ => Success(d -> dd) }) .getOrElse(Failure(new ExecutionError(s"Directive '${d.name}' not found.", exceptionHandler, sourceMapper, d.location.toList)))) - .map(_.flatMap{case (astDir, dir) ⇒ valueCollector.getArgumentValues(Some(astDir), dir.arguments, astDir.arguments, variables) map (dir → _)}) + .map(_.flatMap{case (astDir, dir) => valueCollector.getArgumentValues(Some(astDir), dir.arguments, astDir.arguments, variables) map (dir -> _)}) - possibleDirs.collect{case Failure(error) ⇒ error}.headOption map (Failure(_)) getOrElse { - val validDirs = possibleDirs collect {case Success(v) ⇒ v} - val should = validDirs.forall { case (dir, args) ⇒ dir.shouldInclude(DirectiveContext(selection, dir, args)) } + possibleDirs.collect{case Failure(error) => error}.headOption map (Failure(_)) getOrElse { + val validDirs = possibleDirs collect {case Success(v) => v} + val should = validDirs.forall { case (dir, args) => dir.shouldInclude(DirectiveContext(selection, dir, args)) } Success(should) } @@ -115,11 +115,11 @@ class FieldCollector[Ctx, Val]( def doesFragmentConditionMatch(tpe: ObjectType[_, _], conditional: ast.ConditionalFragment): Try[Boolean] = conditional.typeConditionOpt match { - case Some(tc) ⇒ + case Some(tc) => schema.outputTypes.get(tc.name) - .map(condTpe ⇒ Success(condTpe.name == tpe.name || (condTpe.isInstanceOf[AbstractType] && schema.isPossibleType(condTpe.name, tpe)))) + .map(condTpe => Success(condTpe.name == tpe.name || (condTpe.isInstanceOf[AbstractType] && schema.isPossibleType(condTpe.name, tpe)))) .getOrElse(Failure(new ExecutionError(s"Unknown type '${tc.name}'.", exceptionHandler, sourceMapper, 
conditional.location.toList))) - case None ⇒ Success(true) + case None => Success(true) } } @@ -136,12 +136,12 @@ class CollectedFieldsBuilder { def contains(name: String) = indexLookup contains name def add(name: String, field: ast.Field) = { indexLookup.get(name) match { - case Some(idx) ⇒ + case Some(idx) => fields(idx) match { - case s @ Success(list) ⇒ list += field - case _ ⇒ // do nothing because there is already an error + case s @ Success(list) => list += field + case _ => // do nothing because there is already an error } - case None ⇒ + case None => indexLookup(name) = fields.size firstFields += field names += name @@ -153,13 +153,13 @@ class CollectedFieldsBuilder { def addError(name: String, field: ast.Field, error: Throwable) = { indexLookup.get(name) match { - case Some(idx) ⇒ + case Some(idx) => fields(idx) match { - case s @ Success(list) ⇒ + case s @ Success(list) => fields(idx) = Failure(error) - case _ ⇒ // do nothing because there is already an error + case _ => // do nothing because there is already an error } - case None ⇒ + case None => indexLookup(name) = fields.size firstFields += field names += name @@ -171,7 +171,7 @@ class CollectedFieldsBuilder { def build = { val builtFields = firstFields.toVector.zipWithIndex map { - case (f, idx) ⇒ CollectedField(names(idx), f, fields(idx) map (_.toVector)) + case (f, idx) => CollectedField(names(idx), f, fields(idx) map (_.toVector)) } CollectedFields(names.toVector, builtFields) diff --git a/src/main/scala/sangria/execution/InputDocumentMaterializer.scala b/src/main/scala/sangria/execution/InputDocumentMaterializer.scala index 63ef3edd..0ab3a978 100644 --- a/src/main/scala/sangria/execution/InputDocumentMaterializer.scala +++ b/src/main/scala/sangria/execution/InputDocumentMaterializer.scala @@ -26,37 +26,37 @@ case class InputDocumentMaterializer[Vars](schema: Schema[_, _], variables: Vars val variableDefinitions = inferVariableDefinitions(document, inputType) 
collector.getVariableValues(variableDefinitions, None) match { - case Failure(e) ⇒ scheme.failure(e) - case Success(vars) ⇒ + case Failure(e) => scheme.failure(e) + case Success(vars) => try { - scheme.success(document.values flatMap { value ⇒ + scheme.success(document.values flatMap { value => collector.coercionHelper.coerceInputValue(inputType, Nil, value, None, Some(vars), fromInput.marshaller, fromInput.marshaller, isArgument = false) match { - case Left(vs) ⇒ throw InputDocumentMaterializationError(vs, ExceptionHandler.empty) - case Right(coerced) ⇒ coerced.toOption.map(res ⇒ fromInput.fromResult(res)) + case Left(vs) => throw InputDocumentMaterializationError(vs, ExceptionHandler.empty) + case Right(coerced) => coerced.toOption.map(res => fromInput.fromResult(res)) } }) } catch { - case NonFatal(e) ⇒ scheme.failure(e) + case NonFatal(e) => scheme.failure(e) } } } } def inferVariableDefinitions[T](document: InputDocument, inputType: InputType[T]) = { - document.values.flatMap { v ⇒ - AstVisitor.visitAstWithState(schema, v, new mutable.HashMap[String, VariableDefinition]) { (typeInfo, state) ⇒ + document.values.flatMap { v => + AstVisitor.visitAstWithState(schema, v, new mutable.HashMap[String, VariableDefinition]) { (typeInfo, state) => typeInfo.withInputType(inputType) AstVisitor { - case v: ast.VariableValue if typeInfo.inputType.isDefined ⇒ + case v: ast.VariableValue if typeInfo.inputType.isDefined => val parentType = typeInfo.inputType.get val parentTypeAst = SchemaRenderer.renderTypeNameAst(parentType) state.get(v.name) match { - case None ⇒ + case None => state(v.name) = ast.VariableDefinition(v.name, parentTypeAst, None) VisitorCommand.Continue - case _ ⇒ VisitorCommand.Continue + case _ => VisitorCommand.Continue } } }.values.toVector @@ -97,6 +97,6 @@ object InputDocumentMaterializer { Schema(ObjectType("Query", fields[Unit, Unit]( Field("stub", StringType, arguments = Argument("stub", inputType) :: Nil, - resolve = _ ⇒ "stub")))) + resolve = _ => 
"stub")))) } diff --git a/src/main/scala/sangria/execution/QueryReducer.scala b/src/main/scala/sangria/execution/QueryReducer.scala index 4d8c5012..68ec5816 100644 --- a/src/main/scala/sangria/execution/QueryReducer.scala +++ b/src/main/scala/sangria/execution/QueryReducer.scala @@ -37,34 +37,34 @@ trait QueryReducer[-Ctx, +Out] { } object QueryReducer { - type ArgumentValuesFn = (ExecutionPath, List[Argument[_]], Vector[ast.Argument]) ⇒ Try[Args] + type ArgumentValuesFn = (ExecutionPath, List[Argument[_]], Vector[ast.Argument]) => Try[Args] - def measureComplexity[Ctx](fn: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = + def measureComplexity[Ctx](fn: (Double, Ctx) => ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = new MeasureComplexity[Ctx](fn) - def rejectComplexQueries[Ctx](complexityThreshold: Double, error: (Double, Ctx) ⇒ Throwable): QueryReducer[Ctx, Ctx] = - measureComplexity[Ctx]((c, ctx) ⇒ + def rejectComplexQueries[Ctx](complexityThreshold: Double, error: (Double, Ctx) => Throwable): QueryReducer[Ctx, Ctx] = + measureComplexity[Ctx]((c, ctx) => if (c >= complexityThreshold) throw error(c, ctx) else ctx) - def measureDepth[Ctx](fn: (Int, Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = + def measureDepth[Ctx](fn: (Int, Ctx) => ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = new MeasureQueryDepth[Ctx](fn) def rejectMaxDepth[Ctx](maxDepth: Int): QueryReducer[Ctx, Ctx] = - measureDepth[Ctx]((depth, ctx) ⇒ + measureDepth[Ctx]((depth, ctx) => if (depth > maxDepth) throw new MaxQueryDepthReachedError(maxDepth) else ctx) - def collectTags[Ctx, T](tagMatcher: PartialFunction[FieldTag, T])(fn: (Seq[T], Ctx) ⇒ ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = + def collectTags[Ctx, T](tagMatcher: PartialFunction[FieldTag, T])(fn: (Seq[T], Ctx) => ReduceAction[Ctx, Ctx]): QueryReducer[Ctx, Ctx] = new TagCollector[Ctx, T](tagMatcher, fn) def rejectIntrospection[Ctx](includeTypeName: Boolean = true): QueryReducer[Ctx, Ctx] = - 
hasIntrospection((hasIntro, ctx) ⇒ if (hasIntro) throw IntrospectionNotAllowedError else ctx, includeTypeName) + hasIntrospection((hasIntro, ctx) => if (hasIntro) throw IntrospectionNotAllowedError else ctx, includeTypeName) - def hasIntrospection[Ctx](fn: (Boolean, Ctx) ⇒ ReduceAction[Ctx, Ctx], includeTypeName: Boolean = true): QueryReducer[Ctx, Ctx] = + def hasIntrospection[Ctx](fn: (Boolean, Ctx) => ReduceAction[Ctx, Ctx], includeTypeName: Boolean = true): QueryReducer[Ctx, Ctx] = new HasIntrospectionReducer[Ctx](includeTypeName, fn) } -class MeasureComplexity[Ctx](action: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { +class MeasureComplexity[Ctx](action: (Double, Ctx) => ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { type Acc = Double import MeasureComplexity.DefaultComplexity @@ -83,12 +83,12 @@ class MeasureComplexity[Ctx](action: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]) e field: Field[Ctx, Val], argumentValuesFn: QueryReducer.ArgumentValuesFn): Acc = { val estimate = field.complexity match { - case Some(fn) ⇒ + case Some(fn) => argumentValuesFn(path, field.arguments, astFields.head.arguments) match { - case Success(args) ⇒ fn(ctx, args, childrenAcc) - case Failure(_) ⇒ DefaultComplexity + childrenAcc + case Success(args) => fn(ctx, args, childrenAcc) + case Failure(_) => DefaultComplexity + childrenAcc } - case None ⇒ DefaultComplexity + childrenAcc + case None => DefaultComplexity + childrenAcc } fieldAcc + estimate @@ -108,7 +108,7 @@ class MeasureComplexity[Ctx](action: (Double, Ctx) ⇒ ReduceAction[Ctx, Ctx]) e action(acc, ctx) } -class MeasureQueryDepth[Ctx](action: (Int, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { +class MeasureQueryDepth[Ctx](action: (Int, Ctx) => ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { type Acc = Int def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.max @@ -144,7 +144,7 @@ object MeasureComplexity { val DefaultComplexity = 1.0D } -class 
TagCollector[Ctx, T](tagMatcher: PartialFunction[FieldTag, T], action: (Seq[T], Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { +class TagCollector[Ctx, T](tagMatcher: PartialFunction[FieldTag, T], action: (Seq[T], Ctx) => ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { type Acc = Vector[T] val initial = Vector.empty @@ -160,7 +160,7 @@ class TagCollector[Ctx, T](tagMatcher: PartialFunction[FieldTag, T], action: (Se parentType: ObjectType[Ctx, Val], field: Field[Ctx, Val], argumentValuesFn: QueryReducer.ArgumentValuesFn): Acc = - fieldAcc ++ childrenAcc ++ field.tags.collect {case t if tagMatcher.isDefinedAt(t) ⇒ tagMatcher(t)} + fieldAcc ++ childrenAcc ++ field.tags.collect {case t if tagMatcher.isDefinedAt(t) => tagMatcher(t)} def reduceScalar[ST]( path: ExecutionPath, @@ -176,12 +176,12 @@ class TagCollector[Ctx, T](tagMatcher: PartialFunction[FieldTag, T], action: (Se action(acc, ctx) } -class HasIntrospectionReducer[Ctx](includeTypeName: Boolean, action: (Boolean, Ctx) ⇒ ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { +class HasIntrospectionReducer[Ctx](includeTypeName: Boolean, action: (Boolean, Ctx) => ReduceAction[Ctx, Ctx]) extends QueryReducer[Ctx, Ctx] { type Acc = Boolean val initial = false - def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.exists(hasIntro ⇒ hasIntro) + def reduceAlternatives(alternatives: Seq[Acc]) = alternatives.exists(hasIntro => hasIntro) def reduceField[Val]( fieldAcc: Acc, diff --git a/src/main/scala/sangria/execution/QueryReducerExecutor.scala b/src/main/scala/sangria/execution/QueryReducerExecutor.scala index b5ee945b..b4dd6a56 100644 --- a/src/main/scala/sangria/execution/QueryReducerExecutor.scala +++ b/src/main/scala/sangria/execution/QueryReducerExecutor.scala @@ -31,15 +31,15 @@ object QueryReducerExecutor { val valueCollector = new ValueCollector[Ctx, _ @@ ScalaInput](schema, InputUnmarshaller.emptyMapVars, queryAst.sourceMapper, deprecationTracker, userContext, 
exceptionHandler, scalarMiddleware, true)(InputUnmarshaller.scalaInputUnmarshaller[_ @@ ScalaInput]) val executionResult = for { - operation ← Executor.getOperation(exceptionHandler,queryAst, operationName) + operation <- Executor.getOperation(exceptionHandler,queryAst, operationName) fieldCollector = new FieldCollector[Ctx, Root](schema, queryAst, Map.empty, queryAst.sourceMapper, valueCollector, exceptionHandler) - tpe ← Executor.getOperationRootType(schema, exceptionHandler, operation, queryAst.sourceMapper) - fields ← fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) + tpe <- Executor.getOperationRootType(schema, exceptionHandler, operation, queryAst.sourceMapper) + fields <- fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation)) } yield QueryReducerExecutor.reduceQuery(schema, queryReducers, exceptionHandler, fieldCollector, valueCollector, Map.empty, tpe, fields, userContext) executionResult match { - case Success(future) ⇒ future - case Failure(error) ⇒ Future.failed(error) + case Success(future) => future + case Failure(error) => Future.failed(error) } } } @@ -58,9 +58,9 @@ object QueryReducerExecutor { if (queryReducers.nonEmpty) { val sw = StopWatch.start() reduceQueryUnsafe(schema, fieldCollector, valueCollector, variables, rootTpe, fields, queryReducers.toVector, userContext) - .map(_ → sw.stop) - .recover { case error: Throwable ⇒ throw QueryReducingError(error, exceptionHandler) } - } else Future.successful(userContext → TimeMeasurement.empty) + .map(_ -> sw.stop) + .recover { case error: Throwable => throw QueryReducingError(error, exceptionHandler) } + } else Future.successful(userContext -> TimeMeasurement.empty) private def reduceQueryUnsafe[Ctx, Val]( schema: Schema[Ctx, _], @@ -72,27 +72,27 @@ object QueryReducerExecutor { reducers: Vector[QueryReducer[Ctx, _]], userContext: Ctx)(implicit executionContext: ExecutionContext): Future[Ctx] = { val argumentValuesFn: QueryReducer.ArgumentValuesFn = - (path: 
ExecutionPath, argumentDefs: List[Argument[_]], argumentAsts: Vector[ast.Argument]) ⇒ + (path: ExecutionPath, argumentDefs: List[Argument[_]], argumentAsts: Vector[ast.Argument]) => valueCollector.getFieldArgumentValues(path, None, argumentDefs, argumentAsts, variables) val initialValues: Vector[Any] = reducers map (_.initial) def loop(path: ExecutionPath, tpe: OutputType[_], astFields: Vector[ast.Field]): Seq[Any] = tpe match { - case OptionType(ofType) ⇒ loop(path, ofType, astFields) - case ListType(ofType) ⇒ loop(path, ofType, astFields) - case objTpe: ObjectType[Ctx @unchecked, _] ⇒ + case OptionType(ofType) => loop(path, ofType, astFields) + case ListType(ofType) => loop(path, ofType, astFields) + case objTpe: ObjectType[Ctx @unchecked, _] => fieldCollector.collectFields(path, objTpe, astFields) match { - case Success(ff) ⇒ + case Success(ff) => // Using mutability here locally in order to reduce footprint ff.fields.foldLeft(Array(initialValues: _*)) { - case (acc, CollectedField(_, _, Success(fields))) if objTpe.getField(schema, fields.head.name).nonEmpty ⇒ + case (acc, CollectedField(_, _, Success(fields))) if objTpe.getField(schema, fields.head.name).nonEmpty => val astField = fields.head val field = objTpe.getField(schema, astField.name).head val newPath = path.add(astField, objTpe) val childReduced = loop(newPath, field.fieldType, fields) - for (i ← reducers.indices) { + for (i <- reducers.indices) { val reducer = reducers(i) acc(i) = reducer.reduceField[Any]( @@ -104,34 +104,34 @@ object QueryReducerExecutor { } acc - case (acc, _) ⇒ acc + case (acc, _) => acc } - case Failure(_) ⇒ initialValues + case Failure(_) => initialValues } - case abst: AbstractType ⇒ + case abst: AbstractType => schema.possibleTypes .get (abst.name) - .map (types ⇒ + .map (types => types.map(loop(path, _, astFields)).transpose.zipWithIndex.map{ - case (values, idx) ⇒ + case (values, idx) => val reducer = reducers(idx) 
reducer.reduceAlternatives(values.asInstanceOf[Seq[reducer.Acc]]) }) .getOrElse (initialValues) - case s: ScalarType[_] ⇒ reducers map (_.reduceScalar(path, userContext, s)) - case ScalarAlias(aliasFor, _, _) ⇒ reducers map (_.reduceScalar(path, userContext, aliasFor)) - case e: EnumType[_] ⇒ reducers map (_.reduceEnum(path, userContext, e)) - case _ ⇒ initialValues + case s: ScalarType[_] => reducers map (_.reduceScalar(path, userContext, s)) + case ScalarAlias(aliasFor, _, _) => reducers map (_.reduceScalar(path, userContext, aliasFor)) + case e: EnumType[_] => reducers map (_.reduceEnum(path, userContext, e)) + case _ => initialValues } val reduced = fields.fields.foldLeft(Array(initialValues: _*)) { - case (acc, CollectedField(_, _, Success(astFields))) if rootTpe.getField(schema, astFields.head.name).nonEmpty ⇒ + case (acc, CollectedField(_, _, Success(astFields))) if rootTpe.getField(schema, astFields.head.name).nonEmpty => val astField = astFields.head val field = rootTpe.getField(schema, astField.name).head val path = ExecutionPath.empty.add(astField, rootTpe) val childReduced = loop(path, field.fieldType, astFields) - for (i ← reducers.indices) { + for (i <- reducers.indices) { val reducer = reducers(i) acc(i) = reducer.reduceField( @@ -143,36 +143,36 @@ object QueryReducerExecutor { } acc - case (acc, _) ⇒ acc + case (acc, _) => acc } val newContext = try { // Unsafe part to avoid additional boxing in order to reduce the footprint reducers.zipWithIndex.foldLeft(userContext: Any) { - case (acc: Future[Ctx @unchecked], (reducer, idx)) ⇒ - acc.flatMap(a ⇒ reducer.reduceCtx(reduced(idx).asInstanceOf[reducer.Acc], a) match { - case FutureValue(future) ⇒ future - case Value(value) ⇒ Future.successful(value) - case TryValue(value) ⇒ Future.fromTry(value) + case (acc: Future[Ctx @unchecked], (reducer, idx)) => + acc.flatMap(a => reducer.reduceCtx(reduced(idx).asInstanceOf[reducer.Acc], a) match { + case FutureValue(future) => future + case Value(value) => 
Future.successful(value) + case TryValue(value) => Future.fromTry(value) }) - case (acc: Ctx @unchecked, (reducer, idx)) ⇒ + case (acc: Ctx @unchecked, (reducer, idx)) => reducer.reduceCtx(reduced(idx).asInstanceOf[reducer.Acc], acc) match { - case FutureValue(future) ⇒ future - case Value(value) ⇒ value - case TryValue(value) ⇒ value.get + case FutureValue(future) => future + case Value(value) => value + case TryValue(value) => value.get } - case (acc, _) ⇒ Future.failed(new IllegalStateException(s"Invalid shape of the user context! $acc")) + case (acc, _) => Future.failed(new IllegalStateException(s"Invalid shape of the user context! $acc")) } } catch { - case NonFatal(error) ⇒ Future.failed(error) + case NonFatal(error) => Future.failed(error) } newContext match { - case fut: Future[Ctx @unchecked] ⇒ fut - case ctx ⇒ Future.successful(ctx.asInstanceOf[Ctx]) + case fut: Future[Ctx @unchecked] => fut + case ctx => Future.successful(ctx.asInstanceOf[Ctx]) } } diff --git a/src/main/scala/sangria/execution/Resolver.scala b/src/main/scala/sangria/execution/Resolver.scala index 10a6eeb2..7476c9f9 100644 --- a/src/main/scala/sangria/execution/Resolver.scala +++ b/src/main/scala/sangria/execution/Resolver.scala @@ -44,69 +44,69 @@ class Resolver[Ctx]( def resolveFieldsPar(tpe: ObjectType[Ctx, _], value: Any, fields: CollectedFields)(scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = { val actions = collectActionsPar(ExecutionPath.empty, tpe, value, fields, ErrorRegistry.empty, userContext) - handleScheme(processFinalResolve(resolveActionsPar(ExecutionPath.empty, tpe, actions, userContext, fields.namesOrdered)) map (_ → userContext), scheme) + handleScheme(processFinalResolve(resolveActionsPar(ExecutionPath.empty, tpe, actions, userContext, fields.namesOrdered)) map (_ -> userContext), scheme) } def resolveFieldsSeq(tpe: ObjectType[Ctx, _], value: Any, fields: CollectedFields)(scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = { val result = 
resolveSeq(ExecutionPath.empty, tpe, value, fields, ErrorRegistry.empty) - handleScheme(result flatMap (res ⇒ processFinalResolve(res._1).map(_ → res._2)), scheme) + handleScheme(result flatMap (res => processFinalResolve(res._1).map(_ -> res._2)), scheme) } def resolveFieldsSubs(tpe: ObjectType[Ctx, _], value: Any, fields: CollectedFields)(scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = { scheme match { - case ExecutionScheme.Default ⇒ + case ExecutionScheme.Default => val (s, res) = resolveSubs[({type X[Y]})#X](ExecutionPath.empty, tpe, value, fields, ErrorRegistry.empty, None) s.first(res).map(_._2).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case ExecutionScheme.Extended ⇒ + case ExecutionScheme.Extended => val (s, res) = resolveSubs[({type X[Y]})#X](ExecutionPath.empty, tpe, value, fields, ErrorRegistry.empty, None) - s.first(res).map{case (errors, res) ⇒ ExecutionResult(userContext, res, errors, middleware, validationTiming, queryReducerTiming)}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] + s.first(res).map{case (errors, res) => ExecutionResult(userContext, res, errors, middleware, validationTiming, queryReducerTiming)}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case es: ExecutionScheme.StreamBasedExecutionScheme[({type X[Y]})#X @unchecked] ⇒ + case es: ExecutionScheme.StreamBasedExecutionScheme[({type X[Y]})#X @unchecked] => val (_, res) = resolveSubs(ExecutionPath.empty, tpe, value, fields, ErrorRegistry.empty, Some(es.subscriptionStream)) es.subscriptionStream.map(res) { - case (errors, r) if es.extended ⇒ ExecutionResult(userContext, r, errors, middleware, validationTiming, queryReducerTiming) - case (_, r) ⇒ r + case (errors, r) if es.extended => ExecutionResult(userContext, r, errors, middleware, validationTiming, queryReducerTiming) + case (_, r) => r }.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case s ⇒ + case s => throw new IllegalStateException(s"Unsupported execution scheme: $s") } } def 
handleScheme(result: Future[((Vector[RegisteredError], marshaller.Node), Ctx)], scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = scheme match { - case ExecutionScheme.Default ⇒ - result.map{case ((_, res), _) ⇒ res}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] + case ExecutionScheme.Default => + result.map{case ((_, res), _) => res}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case ExecutionScheme.Extended ⇒ - result.map{case ((errors, res), uc) ⇒ ExecutionResult(uc, res, errors, middleware, validationTiming, queryReducerTiming)}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] + case ExecutionScheme.Extended => + result.map{case ((errors, res), uc) => ExecutionResult(uc, res, errors, middleware, validationTiming, queryReducerTiming)}.asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case s: ExecutionScheme.StreamBasedExecutionScheme[_] ⇒ + case s: ExecutionScheme.StreamBasedExecutionScheme[_] => s.subscriptionStream.singleFuture(result.map { - case ((errors, res), uc) if s.extended ⇒ ExecutionResult(uc, res, errors, middleware, validationTiming, queryReducerTiming) - case ((_, res), _) ⇒ res + case ((errors, res), uc) if s.extended => ExecutionResult(uc, res, errors, middleware, validationTiming, queryReducerTiming) + case ((_, res), _) => res }).asInstanceOf[scheme.Result[Ctx, marshaller.Node]] - case s ⇒ + case s => throw new IllegalStateException(s"Unsupported execution scheme: $s") } def processFinalResolve(resolve: Resolve) = resolve match { - case Result(errors, data, _) ⇒ + case Result(errors, data, _) => Future.successful( - errors.originalErrors → + errors.originalErrors -> marshalResult( data.asInstanceOf[Option[resultResolver.marshaller.Node]], marshalErrors(errors), marshallExtensions.asInstanceOf[Option[resultResolver.marshaller.Node]]).asInstanceOf[marshaller.Node]) - case dr: DeferredResult ⇒ - immediatelyResolveDeferred(userContext, dr, _ map { case (Result(errors, data, _)) ⇒ - errors.originalErrors → + case dr: 
DeferredResult => + immediatelyResolveDeferred(userContext, dr, _ map { case (Result(errors, data, _)) => + errors.originalErrors -> marshalResult( data.asInstanceOf[Option[resultResolver.marshaller.Node]], marshalErrors(errors), @@ -117,25 +117,25 @@ class Resolver[Ctx]( private def marshallExtensions: Option[marshaller.Node] = { val extensions = middleware flatMap { - case (v, m: MiddlewareExtension[Ctx]) ⇒ m.afterQueryExtensions(v.asInstanceOf[m.QueryVal], middlewareCtx) - case _⇒ Nil + case (v, m: MiddlewareExtension[Ctx]) => m.afterQueryExtensions(v.asInstanceOf[m.QueryVal], middlewareCtx) + case _=> Nil } if (extensions.nonEmpty) ResultResolver.marshalExtensions(marshaller, extensions) else None } - private def immediatelyResolveDeferred[T](uc: Ctx, dr: DeferredResult, fn: Future[Result] ⇒ Future[T]): Future[T] = { + private def immediatelyResolveDeferred[T](uc: Ctx, dr: DeferredResult, fn: Future[Result] => Future[T]): Future[T] = { val res = fn(dr.futureValue) - resolveDeferredWithGrouping(dr.deferred).foreach(groups ⇒ - groups.foreach(group ⇒ resolveDeferred(uc, group))) + resolveDeferredWithGrouping(dr.deferred).foreach(groups => + groups.foreach(group => resolveDeferred(uc, group))) res } private def resolveDeferredWithGrouping(deferred: Vector[Future[Vector[Defer]]]) = - Future.sequence(deferred).map(listOfDef ⇒ deferredResolver.groupDeferred(listOfDef.flatten)) + Future.sequence(deferred).map(listOfDef => deferredResolver.groupDeferred(listOfDef.flatten)) private type Actions = (ErrorRegistry, Option[Vector[(Vector[ast.Field], Option[(Field[Ctx, _], Option[MappedCtxUpdate[Ctx, Any, Any]], LeafAction[Ctx, _])])]]) @@ -146,8 +146,8 @@ class Resolver[Ctx]( fields: CollectedFields, errorReg: ErrorRegistry, requestedStream: Option[SubscriptionStream[S]]): (SubscriptionStream[S], S[(Vector[RegisteredError], marshaller.Node)]) = { - val firstStream = tpe.uniqueFields.head.tags.collectFirst{case SubscriptionField(s) ⇒ s}.get.asInstanceOf[SubscriptionStream[S]] 
- val stream = requestedStream.fold(firstStream) { s ⇒ + val firstStream = tpe.uniqueFields.head.tags.collectFirst{case SubscriptionField(s) => s}.get.asInstanceOf[SubscriptionStream[S]] + val stream = requestedStream.fold(firstStream) { s => if (s.supported(firstStream)) s else throw new IllegalStateException("Subscription type field stream implementation is incompatible with requested stream implementation") } @@ -156,24 +156,24 @@ class Resolver[Ctx]( stream.single(result) val fieldStreams = fields.fields.flatMap { - case CollectedField(name, origField, _) if tpe.getField(schema, origField.name).isEmpty ⇒ None - case CollectedField(name, origField, Failure(error)) ⇒ + case CollectedField(name, origField, _) if tpe.getField(schema, origField.name).isEmpty => None + case CollectedField(name, origField, Failure(error)) => val resMap = marshaller.emptyMapNode(Seq(origField.outputName)) Some(marshallResult(Result(errorReg.add(path.add(origField, tpe), error), if (isOptional(tpe, origField.name)) Some(marshaller.addMapNodeElem(resMap, origField.outputName, marshaller.nullNode, optional = true)) else None))) - case CollectedField(name, origField, Success(fields)) ⇒ + case CollectedField(name, origField, Success(fields)) => resolveField(userContext, tpe, path.add(origField, tpe), value, ErrorRegistry.empty, name, fields) match { - case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) ⇒ + case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) => val resMap = marshaller.emptyMapNode(Seq(origField.outputName)) Some(marshallResult( Result(updatedErrors, Some(marshaller.addMapNodeElem(resMap.asInstanceOf[marshaller.MapBuilder], fields.head.outputName, marshaller.nullNode, optional = isOptional(tpe, origField.name)))))) - case ErrorFieldResolution(updatedErrors) ⇒ Some(marshallResult(Result(updatedErrors, None))) - case StreamFieldResolution(updatedErrors, svalue, standardFn) ⇒ - val s = svalue.stream.mapFuture[Any, 
Result](svalue.source) { action ⇒ + case ErrorFieldResolution(updatedErrors) => Some(marshallResult(Result(updatedErrors, None))) + case StreamFieldResolution(updatedErrors, svalue, standardFn) => + val s = svalue.stream.mapFuture[Any, Result](svalue.source) { action => val res = Result(updatedErrors, Some(marshaller.emptyMapNode(Seq(origField.outputName)))) val resMap = res.value.get val standardAction = standardFn(action) @@ -182,7 +182,7 @@ class Resolver[Ctx]( .map(_._1) } - val recovered = svalue.stream.recover(s) { e ⇒ + val recovered = svalue.stream.recover(s) { e => val resMap = marshaller.emptyMapNode(Seq(origField.outputName)) Result(updatedErrors.add(path.add(origField, tpe), e), @@ -194,7 +194,7 @@ class Resolver[Ctx]( } } - stream → stream.mapFuture(stream.merge(fieldStreams.asInstanceOf[Vector[S[Result]]]))(r ⇒ processFinalResolve(r.buildValue)) + stream -> stream.mapFuture(stream.merge(fieldStreams.asInstanceOf[Vector[S[Result]]]))(r => processFinalResolve(r.buildValue)) } def resolveSeq( @@ -204,20 +204,20 @@ class Resolver[Ctx]( fields: CollectedFields, errorReg: ErrorRegistry): Future[(Result, Ctx)] = { fields.fields.foldLeft(Future.successful((Result(ErrorRegistry.empty, Some(marshaller.emptyMapNode(fields.namesOrdered))), userContext))) { - case (future, elem) ⇒ future.flatMap { resAndCtx ⇒ + case (future, elem) => future.flatMap { resAndCtx => (resAndCtx, elem) match { - case (acc @ (Result(_, None, _), _), _) ⇒ Future.successful(acc) - case (acc, CollectedField(name, origField, _)) if tpe.getField(schema, origField.name).isEmpty ⇒ Future.successful(acc) - case ((Result(errors, s @ Some(acc), _), uc), CollectedField(name, origField, Failure(error))) ⇒ + case (acc @ (Result(_, None, _), _), _) => Future.successful(acc) + case (acc, CollectedField(name, origField, _)) if tpe.getField(schema, origField.name).isEmpty => Future.successful(acc) + case ((Result(errors, s @ Some(acc), _), uc), CollectedField(name, origField, Failure(error))) => 
Future.successful(Result(errors.add(path.add(origField, tpe), error), if (isOptional(tpe, origField.name)) Some(marshaller.addMapNodeElem(acc.asInstanceOf[marshaller.MapBuilder], origField.outputName, marshaller.nullNode, optional = true)) - else None) → uc) - case ((accRes @ Result(errors, s @ Some(acc), _), uc), CollectedField(name, origField, Success(fields))) ⇒ + else None) -> uc) + case ((accRes @ Result(errors, s @ Some(acc), _), uc), CollectedField(name, origField, Success(fields))) => resolveSingleFieldSeq(path, uc, tpe, value, errors, name, origField, fields, accRes, acc) } } } map { - case (res, ctx) ⇒ res.buildValue → ctx + case (res, ctx) => res.buildValue -> ctx } } @@ -234,10 +234,10 @@ class Resolver[Ctx]( acc: Any // from `accRes` ): Future[(Result, Ctx)] = resolveField(uc, tpe, path.add(origField, tpe), value, errors, name, fields) match { - case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) ⇒ - Future.successful(Result(updatedErrors, Some(marshaller.addMapNodeElem(acc.asInstanceOf[marshaller.MapBuilder], fields.head.outputName, marshaller.nullNode, optional = isOptional(tpe, origField.name)))) → uc) - case ErrorFieldResolution(updatedErrors) ⇒ Future.successful(Result(updatedErrors, None) → uc) - case resolution: StandardFieldResolution ⇒ + case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) => + Future.successful(Result(updatedErrors, Some(marshaller.addMapNodeElem(acc.asInstanceOf[marshaller.MapBuilder], fields.head.outputName, marshaller.nullNode, optional = isOptional(tpe, origField.name)))) -> uc) + case ErrorFieldResolution(updatedErrors) => Future.successful(Result(updatedErrors, None) -> uc) + case resolution: StandardFieldResolution => resolveStandardFieldResolutionSeq(path, uc, tpe, name, origField, fields, accRes, acc, resolution) } @@ -262,43 +262,43 @@ class Resolver[Ctx]( try { newUc foreach (_.onError(e)) } catch { - case NonFatal(ee) ⇒ ee.printStackTrace() + case NonFatal(ee) => 
ee.printStackTrace() } e } def resolveVal(v: Any) = newUc match { - case Some(MappedCtxUpdate(_, mapFn, _)) ⇒ mapFn(v) - case None ⇒ v + case Some(MappedCtxUpdate(_, mapFn, _)) => mapFn(v) + case None => v } val resolve = try { result match { - case Value(v) ⇒ + case Value(v) => val updatedUc = resolveUc(v) - Future.successful(resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) → updatedUc) + Future.successful(resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) -> updatedUc) - case SequenceLeafAction(actions) ⇒ + case SequenceLeafAction(actions) => val values = resolveActionSequenceValues(fieldPath, fields, sfield, actions) val future = Future.sequence(values.map(_.value)) - val resolved = future.flatMap { vs ⇒ + val resolved = future.flatMap { vs => val errors = vs.flatMap(_.errors).toVector - val successfulValues = vs.collect {case SeqFutRes(v, _, _) if v != null ⇒ v} - val dctx = vs.collect {case SeqFutRes(_, _, d) if d != null ⇒ d} + val successfulValues = vs.collect {case SeqFutRes(v, _, _) if v != null => v} + val dctx = vs.collect {case SeqFutRes(_, _, d) if d != null => d} def resolveDctx(resolve: Resolve) = { val last = dctx.lastOption val init = if (dctx.isEmpty) dctx else dctx.init resolve match { - case res: Result ⇒ + case res: Result => dctx.foreach(_.promise.success(Vector.empty)) Future.successful(res) - case res: DeferredResult ⇒ + case res: DeferredResult => init.foreach(_.promise.success(Vector.empty)) last.foreach(_.promise.success(res.deferred)) res.futureValue @@ -313,68 +313,68 @@ class Resolver[Ctx]( else resolveDctx(Result(ErrorRegistry.empty.append(fieldPath, errors, fields.head.location), None)) }.recover { - case e ⇒ Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) + case e => Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) } - val deferred = values.collect {case SeqRes(_, d, _) if d != null ⇒ d}.toVector - val 
deferredFut = values.collect {case SeqRes(_, _, d) if d != null ⇒ d}.toVector + val deferred = values.collect {case SeqRes(_, d, _) if d != null => d}.toVector + val deferredFut = values.collect {case SeqRes(_, _, d) if d != null => d}.toVector immediatelyResolveDeferred( uc, DeferredResult(Future.successful(deferred) +: deferredFut, resolved), - _.map(r ⇒ r → r.userContext.getOrElse(uc))) + _.map(r => r -> r.userContext.getOrElse(uc))) - case PartialValue(v, es) ⇒ + case PartialValue(v, es) => val updatedUc = resolveUc(v) es foreach resolveError Future.successful( resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) - .appendErrors(fieldPath, es, fields.head.location) → updatedUc) - case TryValue(v) ⇒ + .appendErrors(fieldPath, es, fields.head.location) -> updatedUc) + case TryValue(v) => Future.successful(v match { - case Success(success) ⇒ + case Success(success) => val updatedUc = resolveUc(success) - resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(success), updatedUc) → updatedUc - case Failure(e) ⇒ Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) → uc + resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(success), updatedUc) -> updatedUc + case Failure(e) => Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) -> uc }) - case DeferredValue(d) ⇒ + case DeferredValue(d) => val p = Promise[(ChildDeferredContext, Any, Vector[Throwable])]() val (args, complexity) = calcComplexity(fieldPath, origField, sfield, userContext) val defer = Defer(p, d, complexity, sfield, fields, args) - immediatelyResolveDeferred(uc, DeferredResult(Vector(Future.successful(Vector(defer))), p.future.flatMap { case (dctx, v, es) ⇒ + immediatelyResolveDeferred(uc, DeferredResult(Vector(Future.successful(Vector(defer))), p.future.flatMap { case (dctx, v, es) => val updatedUc = resolveUc(v) es foreach resolveError resolveValue(fieldPath, fields, sfield.fieldType, sfield, 
resolveVal(v), updatedUc).appendErrors(fieldPath, es, fields.head.location) match { - case r: Result ⇒ dctx.resolveResult(r.copy(userContext = Some(updatedUc))) - case er: DeferredResult ⇒ + case r: Result => dctx.resolveResult(r.copy(userContext = Some(updatedUc))) + case er: DeferredResult => dctx.resolveDeferredResult(updatedUc, er).map(_.copy(userContext = Some(updatedUc))) } }.recover { - case e ⇒ Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) - }), _.map(r ⇒ r → r.userContext.getOrElse(uc))) - case FutureValue(f) ⇒ - f.map { v ⇒ + case e => Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) + }), _.map(r => r -> r.userContext.getOrElse(uc))) + case FutureValue(f) => + f.map { v => val updatedUc = resolveUc(v) - resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) → updatedUc - }.recover { case e ⇒ Result(ErrorRegistry(path.add(origField, tpe), resolveError(e), fields.head.location), None) → uc} - case PartialFutureValue(f) ⇒ + resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) -> updatedUc + }.recover { case e => Result(ErrorRegistry(path.add(origField, tpe), resolveError(e), fields.head.location), None) -> uc} + case PartialFutureValue(f) => f.map{ - case PartialValue(v, es) ⇒ + case PartialValue(v, es) => val updatedUc = resolveUc(v) es foreach resolveError resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc) - .appendErrors(fieldPath, es, fields.head.location) → updatedUc - }.recover { case e ⇒ Result(ErrorRegistry(path.add(origField, tpe), resolveError(e), fields.head.location), None) → uc} - case DeferredFutureValue(df) ⇒ + .appendErrors(fieldPath, es, fields.head.location) -> updatedUc + }.recover { case e => Result(ErrorRegistry(path.add(origField, tpe), resolveError(e), fields.head.location), None) -> uc} + case DeferredFutureValue(df) => val p = Promise[(ChildDeferredContext, Any, 
Vector[Throwable])]() def defer(d: Deferred[Any]) = { val (args, complexity) = calcComplexity(fieldPath, origField, sfield, userContext) @@ -382,41 +382,41 @@ class Resolver[Ctx]( } val actualDeferred = df - .map(d ⇒ Vector(defer(d))) + .map(d => Vector(defer(d))) .recover { - case NonFatal(e) ⇒ + case NonFatal(e) => p.failure(e) Vector.empty } - immediatelyResolveDeferred(uc, DeferredResult(Vector(actualDeferred), p.future.flatMap { case (dctx, v, es) ⇒ + immediatelyResolveDeferred(uc, DeferredResult(Vector(actualDeferred), p.future.flatMap { case (dctx, v, es) => val updatedUc = resolveUc(v) es foreach resolveError resolveValue(fieldPath, fields, sfield.fieldType, sfield, resolveVal(v), updatedUc).appendErrors(fieldPath, es, fields.head.location) match { - case r: Result ⇒ dctx.resolveResult(r.copy(userContext = Some(updatedUc))) - case er: DeferredResult ⇒ dctx.resolveDeferredResult(updatedUc, er).map(_.copy(userContext = Some(updatedUc))) + case r: Result => dctx.resolveResult(r.copy(userContext = Some(updatedUc))) + case er: DeferredResult => dctx.resolveDeferredResult(updatedUc, er).map(_.copy(userContext = Some(updatedUc))) } }.recover { - case e ⇒ Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) - }), _.map(r ⇒ r → r.userContext.getOrElse(uc))) - case SubscriptionValue(_, _) ⇒ + case e => Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) + }), _.map(r => r -> r.userContext.getOrElse(uc))) + case SubscriptionValue(_, _) => Future.failed(new IllegalStateException("Subscription values are not supported for normal operations")) - case _: MappedSequenceLeafAction[_, _, _] ⇒ + case _: MappedSequenceLeafAction[_, _, _] => Future.failed(new IllegalStateException("MappedSequenceLeafAction is not supposed to appear here")) } } catch { - case NonFatal(e) ⇒ - Future.successful(Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) → uc) + case NonFatal(e) => + 
Future.successful(Result(ErrorRegistry(fieldPath, resolveError(e), fields.head.location), None) -> uc) } resolve.flatMap { - case (r : Result, newUc) ⇒ - Future.successful(accRes.addToMap(r, fields.head.outputName, isOptional(tpe, fields.head.name), fieldPath, fields.head.location, updatedErrors) → newUc) - case (dr : DeferredResult, newUc) ⇒ + case (r : Result, newUc) => + Future.successful(accRes.addToMap(r, fields.head.outputName, isOptional(tpe, fields.head.name), fieldPath, fields.head.location, updatedErrors) -> newUc) + case (dr : DeferredResult, newUc) => immediatelyResolveDeferred(newUc, dr, - _.map(accRes.addToMap(_, fields.head.outputName, isOptional(tpe, fields.head.name), fieldPath, fields.head.location, updatedErrors) → newUc)) + _.map(accRes.addToMap(_, fields.head.outputName, isOptional(tpe, fields.head.name), fieldPath, fields.head.location, updatedErrors) -> newUc)) } } @@ -424,8 +424,8 @@ class Resolver[Ctx]( val args = valueCollector.getFieldArgumentValues(path, Some(astField), field.arguments, astField.arguments, variables) args match { - case Success(a) ⇒ a → field.complexity.fold(DefaultComplexity)(_(uc, a, DefaultComplexity)) - case _ ⇒ Args.empty → DefaultComplexity + case Success(a) => a -> field.complexity.fold(DefaultComplexity)(_(uc, a, DefaultComplexity)) + case _ => Args.empty -> DefaultComplexity } } @@ -437,33 +437,33 @@ class Resolver[Ctx]( errorReg: ErrorRegistry, userCtx: Ctx): Actions = fields.fields.foldLeft((errorReg, Some(Vector.empty)): Actions) { - case (acc @ (_, None), _) ⇒ acc - case (acc, CollectedField(name, origField, _)) if tpe.getField(schema, origField.name).isEmpty ⇒ acc - case ((errors, s @ Some(acc)), CollectedField(name, origField, Failure(error))) ⇒ - errors.add(path.add(origField, tpe), error) → (if (isOptional(tpe, origField.name)) Some(acc :+ (Vector(origField) → None)) else None) - case ((errors, s @ Some(acc)), CollectedField(name, origField, Success(fields))) ⇒ + case (acc @ (_, None), _) => acc + case 
(acc, CollectedField(name, origField, _)) if tpe.getField(schema, origField.name).isEmpty => acc + case ((errors, s @ Some(acc)), CollectedField(name, origField, Failure(error))) => + errors.add(path.add(origField, tpe), error) -> (if (isOptional(tpe, origField.name)) Some(acc :+ (Vector(origField) -> None)) else None) + case ((errors, s @ Some(acc)), CollectedField(name, origField, Success(fields))) => resolveField(userCtx, tpe, path.add(origField, tpe), value, errors, name, fields) match { - case StandardFieldResolution(updatedErrors, result, updateCtx) ⇒ updatedErrors → Some(acc :+ (fields → Some((tpe.getField(schema, origField.name).head, updateCtx, result)))) - case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) ⇒ updatedErrors → Some(acc :+ (Vector(origField) → None)) - case ErrorFieldResolution(updatedErrors) ⇒ updatedErrors → None + case StandardFieldResolution(updatedErrors, result, updateCtx) => updatedErrors -> Some(acc :+ (fields -> Some((tpe.getField(schema, origField.name).head, updateCtx, result)))) + case ErrorFieldResolution(updatedErrors) if isOptional(tpe, origField.name) => updatedErrors -> Some(acc :+ (Vector(origField) -> None)) + case ErrorFieldResolution(updatedErrors) => updatedErrors -> None } } private def resolveActionSequenceValues(fieldsPath: ExecutionPath, astFields: Vector[ast.Field], field: Field[Ctx, _], actions: Seq[LeafAction[Any, Any]]): Seq[SeqRes] = actions.map { - case Value(v) ⇒ SeqRes(SeqFutRes(v)) - case TryValue(Success(v)) ⇒ SeqRes(SeqFutRes(v)) - case TryValue(Failure(e)) ⇒ SeqRes(SeqFutRes(errors = Vector(e))) - case PartialValue(v, es) ⇒ SeqRes(SeqFutRes(v, es)) - case FutureValue(future) ⇒ SeqRes(future.map(v ⇒ SeqFutRes(v)).recover {case e ⇒ SeqFutRes(errors = Vector(e))}) - case PartialFutureValue(future) ⇒ SeqRes(future.map{case PartialValue(v, es) ⇒ SeqFutRes(v, es)}.recover {case e ⇒ SeqFutRes(errors = Vector(e))}) - case DeferredValue(deferred) ⇒ + case Value(v) => SeqRes(SeqFutRes(v)) + 
case TryValue(Success(v)) => SeqRes(SeqFutRes(v)) + case TryValue(Failure(e)) => SeqRes(SeqFutRes(errors = Vector(e))) + case PartialValue(v, es) => SeqRes(SeqFutRes(v, es)) + case FutureValue(future) => SeqRes(future.map(v => SeqFutRes(v)).recover {case e => SeqFutRes(errors = Vector(e))}) + case PartialFutureValue(future) => SeqRes(future.map{case PartialValue(v, es) => SeqFutRes(v, es)}.recover {case e => SeqFutRes(errors = Vector(e))}) + case DeferredValue(deferred) => val promise = Promise[(ChildDeferredContext, Any, Vector[Throwable])]() val (args, complexity) = calcComplexity(fieldsPath, astFields.head, field, userContext) val defer = Defer(promise, deferred, complexity, field, astFields, args) - SeqRes(promise.future map {case (dctx, v, es) ⇒ SeqFutRes(v, es, dctx)} recover {case e ⇒ SeqFutRes(errors = Vector(e))}, defer) - case DeferredFutureValue(deferredValue) ⇒ + SeqRes(promise.future map {case (dctx, v, es) => SeqFutRes(v, es, dctx)} recover {case e => SeqFutRes(errors = Vector(e))}, defer) + case DeferredFutureValue(deferredValue) => val promise = Promise[(ChildDeferredContext, Any, Vector[Throwable])]() def defer(d: Deferred[Any]) = { @@ -472,17 +472,17 @@ class Resolver[Ctx]( } val actualDeferred = deferredValue - .map(d ⇒ Vector(defer(d))) + .map(d => Vector(defer(d))) .recover { - case NonFatal(e) ⇒ + case NonFatal(e) => promise.failure(e) Vector.empty } - SeqRes(promise.future map {case (dctx, v, es) ⇒ SeqFutRes(v, es, dctx)} recover {case e ⇒ SeqFutRes(errors = Vector(e))}, actualDeferred) - case SequenceLeafAction(_) | _: MappedSequenceLeafAction[_, _, _] ⇒ + SeqRes(promise.future map {case (dctx, v, es) => SeqFutRes(v, es, dctx)} recover {case e => SeqFutRes(errors = Vector(e))}, actualDeferred) + case SequenceLeafAction(_) | _: MappedSequenceLeafAction[_, _, _] => SeqRes(SeqFutRes(errors = Vector(new IllegalStateException("Nested `SequenceLeafAction` is not yet supported inside of another `SequenceLeafAction`")))) - case SubscriptionValue(_, 
_) ⇒ + case SubscriptionValue(_, _) => SeqRes(SeqFutRes(errors = Vector(new IllegalStateException("Subscription values are not supported for normal operations")))) } @@ -495,51 +495,51 @@ class Resolver[Ctx]( try { newUc map (_.onError(e)) } catch { - case NonFatal(ee) ⇒ ee.printStackTrace() + case NonFatal(ee) => ee.printStackTrace() } e } def resolveVal(newUc: Option[MappedCtxUpdate[Ctx, Any, Any]], v: Any) = newUc match { - case Some(MappedCtxUpdate(_, mapFn, _)) ⇒ mapFn(v) - case None ⇒ v + case Some(MappedCtxUpdate(_, mapFn, _)) => mapFn(v) + case None => v } res match { - case None ⇒ Result(errors, None) - case Some(results) ⇒ + case None => Result(errors, None) + case Some(results) => val resolvedValues = results.map { - case (astFields, None) ⇒ astFields.head → Result(ErrorRegistry.empty, None) - case (astFields, Some((field, updateCtx, Value(v)))) ⇒ + case (astFields, None) => astFields.head -> Result(ErrorRegistry.empty, None) + case (astFields, Some((field, updateCtx, Value(v)))) => val fieldsPath = path.add(astFields.head, tpe) try { - astFields.head → resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), resolveUc(updateCtx, v)) + astFields.head -> resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), resolveUc(updateCtx, v)) } catch { - case NonFatal(e) ⇒ - astFields.head → Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case NonFatal(e) => + astFields.head -> Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } - case (astFields, Some((field, updateCtx, SequenceLeafAction(actions)))) ⇒ + case (astFields, Some((field, updateCtx, SequenceLeafAction(actions)))) => val fieldsPath = path.add(astFields.head, tpe) val values = resolveActionSequenceValues(fieldsPath, astFields, field, actions) val future = Future.sequence(values.map(_.value)) - val resolved = future.flatMap { vs ⇒ + val resolved = 
future.flatMap { vs => val errors = vs.flatMap(_.errors).toVector - val successfulValues = vs.collect {case SeqFutRes(v, _, _) if v != null ⇒ v} - val dctx = vs.collect {case SeqFutRes(_, _, d) if d != null ⇒ d} + val successfulValues = vs.collect {case SeqFutRes(v, _, _) if v != null => v} + val dctx = vs.collect {case SeqFutRes(_, _, d) if d != null => d} def resolveDctx(resolve: Resolve) = { val last = dctx.lastOption val init = if (dctx.isEmpty) dctx else dctx.init resolve match { - case res: Result ⇒ + case res: Result => dctx.foreach(_.promise.success(Vector.empty)) Future.successful(res) - case res: DeferredResult ⇒ + case res: DeferredResult => init.foreach(_.promise.success(Vector.empty)) last.foreach(_.promise.success(res.deferred)) res.futureValue @@ -555,131 +555,131 @@ class Resolver[Ctx]( else resolveDctx(Result(ErrorRegistry.empty.append(fieldsPath, errors, astFields.head.location), None)) }.recover { - case e ⇒ Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case e => Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } - val deferred = values.collect {case SeqRes(_, d, _) if d != null ⇒ d}.toVector - val deferredFut = values.collect {case SeqRes(_, _, d) if d != null ⇒ d}.toVector + val deferred = values.collect {case SeqRes(_, d, _) if d != null => d}.toVector + val deferredFut = values.collect {case SeqRes(_, _, d) if d != null => d}.toVector - astFields.head → DeferredResult(Future.successful(deferred) +: deferredFut, resolved) + astFields.head -> DeferredResult(Future.successful(deferred) +: deferredFut, resolved) - case (astFields, Some((field, updateCtx, PartialValue(v, es)))) ⇒ + case (astFields, Some((field, updateCtx, PartialValue(v, es)))) => val fieldsPath = path.add(astFields.head, tpe) es foreach (resolveError(updateCtx, _)) try { - astFields.head → + astFields.head -> resolveValue(fieldsPath, astFields, field.fieldType, field, 
resolveVal(updateCtx, v), resolveUc(updateCtx, v)) .appendErrors(fieldsPath, es, astFields.head.location) } catch { - case NonFatal(e) ⇒ - astFields.head → Result( + case NonFatal(e) => + astFields.head -> Result( ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location) .append(fieldsPath, es, astFields.head.location), None) } - case (astFields, Some((field, updateCtx, TryValue(v)))) ⇒ + case (astFields, Some((field, updateCtx, TryValue(v)))) => val fieldsPath = path.add(astFields.head, tpe) v match { - case Success(success) ⇒ + case Success(success) => try { - astFields.head → resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, success), resolveUc(updateCtx, success)) + astFields.head -> resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, success), resolveUc(updateCtx, success)) } catch { - case NonFatal(e) ⇒ - astFields.head → Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case NonFatal(e) => + astFields.head -> Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } - case Failure(e) ⇒ - astFields.head → Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case Failure(e) => + astFields.head -> Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } - case (astFields, Some((field, updateCtx, DeferredValue(deferred)))) ⇒ + case (astFields, Some((field, updateCtx, DeferredValue(deferred)))) => val fieldsPath = path.add(astFields.head, tpe) val promise = Promise[(ChildDeferredContext, Any, Vector[Throwable])]() val (args, complexity) = calcComplexity(fieldsPath, astFields.head, field, userContext) val defer = Defer(promise, deferred, complexity, field, astFields, args) - astFields.head → DeferredResult(Vector(Future.successful(Vector(defer))), + astFields.head -> DeferredResult(Vector(Future.successful(Vector(defer))), 
promise.future - .flatMap { case (dctx, v, es) ⇒ + .flatMap { case (dctx, v, es) => val uc = resolveUc(updateCtx, v) es foreach (resolveError(updateCtx, _)) resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), uc).appendErrors(fieldsPath, es, astFields.head.location) match { - case r: Result ⇒ dctx.resolveResult(r) - case er: DeferredResult ⇒ dctx.resolveDeferredResult(uc, er) + case r: Result => dctx.resolveResult(r) + case er: DeferredResult => dctx.resolveDeferredResult(uc, er) } } .recover { - case e ⇒ Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case e => Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) }) - case (astFields, Some((field, updateCtx, FutureValue(future)))) ⇒ + case (astFields, Some((field, updateCtx, FutureValue(future)))) => val fieldsPath = path.add(astFields.head, tpe) - val resolved = future.map(v ⇒ resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), resolveUc(updateCtx, v))).recover { - case e ⇒ Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + val resolved = future.map(v => resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), resolveUc(updateCtx, v))).recover { + case e => Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } def process() = { val deferred = resolved flatMap { - case r: Result ⇒ Future.successful(Vector.empty) - case r: DeferredResult ⇒ Future.sequence(r.deferred) map (_.flatten) + case r: Result => Future.successful(Vector.empty) + case r: DeferredResult => Future.sequence(r.deferred) map (_.flatten) } val value = resolved flatMap { - case r: Result ⇒ Future.successful(r) - case dr: DeferredResult ⇒ dr.futureValue + case r: Result => Future.successful(r) + case dr: DeferredResult => dr.futureValue } - astFields.head → 
DeferredResult(Vector(deferred), value) + astFields.head -> DeferredResult(Vector(deferred), value) } def processAndResolveDeferred() = { val value = resolved flatMap { - case r: Result ⇒ Future.successful(r) - case dr: DeferredResult ⇒ immediatelyResolveDeferred(userContext, dr, identity) + case r: Result => Future.successful(r) + case dr: DeferredResult => immediatelyResolveDeferred(userContext, dr, identity) } - astFields.head → DeferredResult(Vector.empty, value) + astFields.head -> DeferredResult(Vector.empty, value) } deferredResolver.includeDeferredFromField match { - case Some(fn) ⇒ + case Some(fn) => val (args, complexity) = calcComplexity(fieldsPath, astFields.head, field, userContext) if (fn(field, astFields, args, complexity)) process() else processAndResolveDeferred() - case None ⇒ + case None => process() } - case (astFields, Some((field, updateCtx, PartialFutureValue(future)))) ⇒ + case (astFields, Some((field, updateCtx, PartialFutureValue(future)))) => val fieldsPath = path.add(astFields.head, tpe) - val resolved = future.map {case PartialValue(v, es) ⇒ + val resolved = future.map {case PartialValue(v, es) => es foreach (resolveError(updateCtx, _)) resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), resolveUc(updateCtx, v)) .appendErrors(fieldsPath, es, astFields.head.location) }.recover { - case e ⇒ Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case e => Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) } val deferred = resolved flatMap { - case r: Result ⇒ Future.successful(Vector.empty) - case r: DeferredResult ⇒ Future.sequence(r.deferred) map (_.flatten) + case r: Result => Future.successful(Vector.empty) + case r: DeferredResult => Future.sequence(r.deferred) map (_.flatten) } val value = resolved flatMap { - case r: Result ⇒ Future.successful(r) - case dr: DeferredResult ⇒ dr.futureValue + case r: Result => 
Future.successful(r) + case dr: DeferredResult => dr.futureValue } - astFields.head → DeferredResult(Vector(deferred), value) + astFields.head -> DeferredResult(Vector(deferred), value) - case (astFields, Some((field, updateCtx, DeferredFutureValue(deferredValue)))) ⇒ + case (astFields, Some((field, updateCtx, DeferredFutureValue(deferredValue)))) => val fieldsPath = path.add(astFields.head, tpe) val promise = Promise[(ChildDeferredContext, Any, Vector[Throwable])]() @@ -689,55 +689,55 @@ class Resolver[Ctx]( } val actualDeferred = deferredValue - .map(d ⇒ Vector(defer(d))) + .map(d => Vector(defer(d))) .recover { - case NonFatal(e) ⇒ + case NonFatal(e) => promise.failure(e) Vector.empty } - astFields.head → DeferredResult(Vector(actualDeferred), - promise.future.flatMap { case (dctx, v, es) ⇒ + astFields.head -> DeferredResult(Vector(actualDeferred), + promise.future.flatMap { case (dctx, v, es) => val uc = resolveUc(updateCtx, v) es foreach (resolveError(updateCtx, _)) resolveValue(fieldsPath, astFields, field.fieldType, field, resolveVal(updateCtx, v), uc).appendErrors(fieldsPath, es, astFields.head.location) match { - case r: Result ⇒ dctx.resolveResult(r) - case er: DeferredResult ⇒ dctx.resolveDeferredResult(uc, er) + case r: Result => dctx.resolveResult(r) + case er: DeferredResult => dctx.resolveDeferredResult(uc, er) } } .recover { - case e ⇒ Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) + case e => Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, e), astFields.head.location), None) }) - case (astFields, Some((_, updateCtx, SubscriptionValue(_, _)))) ⇒ + case (astFields, Some((_, updateCtx, SubscriptionValue(_, _)))) => val fieldsPath = path.add(astFields.head, tpe) val error = new IllegalStateException("Subscription values are not supported for normal operations") - astFields.head → Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, error), astFields.head.location), None) + astFields.head -> 
Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, error), astFields.head.location), None) - case (astFields, Some((_, updateCtx, _: MappedSequenceLeafAction[_, _, _]))) ⇒ + case (astFields, Some((_, updateCtx, _: MappedSequenceLeafAction[_, _, _]))) => val fieldsPath = path.add(astFields.head, tpe) val error = new IllegalStateException("MappedSequenceLeafAction is not supposed to appear here") - astFields.head → Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, error), astFields.head.location), None) + astFields.head -> Result(ErrorRegistry(fieldsPath, resolveError(updateCtx, error), astFields.head.location), None) } - val simpleRes = resolvedValues.collect {case (af, r: Result) ⇒ af → r} + val simpleRes = resolvedValues.collect {case (af, r: Result) => af -> r} val resSoFar = simpleRes.foldLeft(Result(errors, Some(marshaller.emptyMapNode(fieldsNamesOrdered)))) { - case (res, (astField, other)) ⇒ res addToMap (other, astField.outputName, isOptional(tpe, astField.name), path.add(astField, tpe), astField.location, res.errors) + case (res, (astField, other)) => res addToMap (other, astField.outputName, isOptional(tpe, astField.name), path.add(astField, tpe), astField.location, res.errors) } - val complexRes = resolvedValues.collect{case (af, r: DeferredResult) ⇒ af → r} + val complexRes = resolvedValues.collect{case (af, r: DeferredResult) => af -> r} if (complexRes.isEmpty) resSoFar.buildValue else { val allDeferred = complexRes.flatMap(_._2.deferred) - val finalValue = Future.sequence(complexRes.map {case (astField, DeferredResult(_, future)) ⇒ future map (astField → _)}) map { results ⇒ + val finalValue = Future.sequence(complexRes.map {case (astField, DeferredResult(_, future)) => future map (astField -> _)}) map { results => results.foldLeft(resSoFar) { - case (res, (astField, other)) ⇒ res addToMap (other, astField.outputName, isOptional(tpe, astField.name), path.add(astField, tpe), astField.location, res.errors) + case (res, (astField, other)) => 
res addToMap (other, astField.outputName, isOptional(tpe, astField.name), path.add(astField, tpe), astField.location, res.errors) }.buildValue } @@ -749,30 +749,30 @@ class Resolver[Ctx]( private def resolveDeferred(uc: Ctx, toResolve: Vector[Defer]) = if (toResolve.nonEmpty) { def findActualDeferred(deferred: Deferred[_]): Deferred[_] = deferred match { - case MappingDeferred(d, _) ⇒ findActualDeferred(d) - case d ⇒ d + case MappingDeferred(d, _) => findActualDeferred(d) + case d => d } def mapAllDeferred(deferred: Deferred[_], value: Future[Any]): Future[(Any, Vector[Throwable])] = deferred match { - case MappingDeferred(d, fn) ⇒ mapAllDeferred(d, value) map { - case (v, errors) ⇒ + case MappingDeferred(d, fn) => mapAllDeferred(d, value) map { + case (v, errors) => val (mappedV, newErrors) = fn(v) - mappedV → (errors ++ newErrors) + mappedV -> (errors ++ newErrors) } - case _ ⇒ value.map(_→ Vector.empty) + case _ => value.map(_-> Vector.empty) } try { - val resolved = deferredResolver.resolve(toResolve map (d ⇒ findActualDeferred(d.deferred)), uc, deferredResolverState) + val resolved = deferredResolver.resolve(toResolve map (d => findActualDeferred(d.deferred)), uc, deferredResolverState) if (toResolve.size == resolved.size) { val dctx = ParentDeferredContext(uc, toResolve.size) - for (i ← toResolve.indices) { + for (i <- toResolve.indices) { val toRes = toResolve(i) - toRes.promise tryCompleteWith mapAllDeferred(toRes.deferred, resolved(i)).map(v ⇒ (dctx.children(i), v._1, v._2)).recover { - case NonFatal(e) ⇒ + toRes.promise tryCompleteWith mapAllDeferred(toRes.deferred, resolved(i)).map(v => (dctx.children(i), v._1, v._2)).recover { + case NonFatal(e) => dctx.children(i).resolveError(e) throw e } @@ -784,7 +784,7 @@ class Resolver[Ctx]( new IllegalStateException(s"Deferred resolver returned ${resolved.size} elements, but it got ${toResolve.size} deferred values. This violates the contract. 
You can find more information in the documentation: http://sangria-graphql.org/learn/#deferred-values-and-resolver"))) } } catch { - case NonFatal(error) ⇒ toResolve foreach (_.promise.failure(error)) + case NonFatal(error) => toResolve foreach (_.promise.failure(error)) } } @@ -797,30 +797,30 @@ class Resolver[Ctx]( userCtx: Ctx, actualType: Option[InputType[_]] = None): Resolve = tpe match { - case OptionType(optTpe) ⇒ + case OptionType(optTpe) => val actualValue = value match { - case v: Option[_] ⇒ v - case v ⇒ Option(v) + case v: Option[_] => v + case v => Option(v) } actualValue match { - case Some(someValue) ⇒ resolveValue(path, astFields, optTpe, field, someValue, userCtx) - case None ⇒ Result(ErrorRegistry.empty, None) + case Some(someValue) => resolveValue(path, astFields, optTpe, field, someValue, userCtx) + case None => Result(ErrorRegistry.empty, None) } - case ListType(listTpe) ⇒ + case ListType(listTpe) => if (isUndefinedValue(value)) Result(ErrorRegistry.empty, None) else { val actualValue = value match { - case seq: Seq[_] ⇒ seq - case other ⇒ Seq(other) + case seq: Seq[_] => seq + case other => Seq(other) } val res = actualValue.zipWithIndex map { - case (v, idx) ⇒ resolveValue(path withIndex idx, astFields, listTpe, field, v, userCtx) + case (v, idx) => resolveValue(path withIndex idx, astFields, listTpe, field, v, userCtx) } - val simpleRes = res.collect { case r: Result ⇒ r} + val simpleRes = res.collect { case r: Result => r} val optional = isOptional(listTpe) if (simpleRes.size == res.size) @@ -834,9 +834,9 @@ class Resolver[Ctx]( while(resIt.hasNext) { resIt.next() match { - case r: Result ⇒ + case r: Result => resultFutures += Future.successful(r) - case dr: DeferredResult ⇒ + case dr: DeferredResult => resultFutures += dr.futureValue deferredBuilder ++= dr.deferred } @@ -849,7 +849,7 @@ class Resolver[Ctx]( ) } } - case scalar: ScalarType[Any @unchecked] ⇒ + case scalar: ScalarType[Any @unchecked] => try { Result(ErrorRegistry.empty, if 
(isUndefinedValue(value)) @@ -862,19 +862,19 @@ class Resolver[Ctx]( } else { val coercedWithMiddleware = toScalarMiddleware match { - case Some(fn) ⇒ fn(coerced, actualType getOrElse scalar) getOrElse coerced - case None ⇒ coerced + case Some(fn) => fn(coerced, actualType getOrElse scalar) getOrElse coerced + case None => coerced } Some(marshalScalarValue(coercedWithMiddleware, marshaller, scalar.name, scalar.scalarInfo)) } }) } catch { - case NonFatal(e) ⇒ Result(ErrorRegistry(path, e), None) + case NonFatal(e) => Result(ErrorRegistry(path, e), None) } - case scalar: ScalarAlias[Any @unchecked, Any @unchecked] ⇒ + case scalar: ScalarAlias[Any @unchecked, Any @unchecked] => resolveValue(path, astFields, scalar.aliasFor, field, scalar.toScalar(value), userCtx, Some(scalar)) - case enum: EnumType[Any @unchecked] ⇒ + case enum: EnumType[Any @unchecked] => try { Result(ErrorRegistry.empty, if (isUndefinedValue(value)) @@ -888,32 +888,32 @@ class Resolver[Ctx]( Some(marshalEnumValue(coerced, marshaller, enum.name)) }) } catch { - case NonFatal(e) ⇒ Result(ErrorRegistry(path, e), None) + case NonFatal(e) => Result(ErrorRegistry(path, e), None) } - case obj: ObjectType[Ctx, _] ⇒ + case obj: ObjectType[Ctx, _] => if (isUndefinedValue(value)) Result(ErrorRegistry.empty, None) else fieldCollector.collectFields(path, obj, astFields) match { - case Success(fields) ⇒ + case Success(fields) => val actions = collectActionsPar(path, obj, value, fields, ErrorRegistry.empty, userCtx) resolveActionsPar(path, obj, actions, userCtx, fields.namesOrdered) - case Failure(error) ⇒ Result(ErrorRegistry(path, error), None) + case Failure(error) => Result(ErrorRegistry(path, error), None) } - case abst: AbstractType ⇒ + case abst: AbstractType => if (isUndefinedValue(value)) Result(ErrorRegistry.empty, None) else { val actualValue = abst match { - case abst: MappedAbstractType[Any @unchecked] ⇒ abst.contraMap(value) - case _ ⇒ value + case abst: MappedAbstractType[Any @unchecked] => 
abst.contraMap(value) + case _ => value } abst.typeOf(actualValue, schema) match { - case Some(obj) ⇒ resolveValue(path, astFields, obj, field, actualValue, userCtx) - case None ⇒ Result(ErrorRegistry(path, + case Some(obj) => resolveValue(path, astFields, obj, field, actualValue, userCtx) + case None => Result(ErrorRegistry(path, UndefinedConcreteTypeError(path, abst, schema.possibleTypes.getOrElse(abst.name, Vector.empty), actualValue, exceptionHandler, sourceMapper, astFields.head.location.toList)), None) } } @@ -939,11 +939,11 @@ class Resolver[Ctx]( errorReg = errorReg.add(res.errors) res.nodeValue match { - case node if optional ⇒ + case node if optional => listBuilder += marshaller.optionalArrayNodeValue(node) - case Some(other) ⇒ + case Some(other) => listBuilder += other - case None ⇒ + case None => canceled = true } } @@ -964,10 +964,10 @@ class Resolver[Ctx]( val field = allFields.head maxQueryDepth match { - case Some(max) if path.size > max ⇒ ErrorFieldResolution(errors.add(path, new MaxQueryDepthReachedError(max), astField.location)) - case _ ⇒ + case Some(max) if path.size > max => ErrorFieldResolution(errors.add(path, new MaxQueryDepthReachedError(max), astField.location)) + case _ => valueCollector.getFieldArgumentValues(path, Some(astField), field.arguments, astField.arguments, variables) match { - case Success(args) ⇒ + case Success(args) => val ctx = Context[Ctx, Any]( value, userCtx, @@ -988,12 +988,12 @@ class Resolver[Ctx]( try { val mBefore = middleware collect { - case (mv, m: MiddlewareBeforeField[Ctx]) ⇒ + case (mv, m: MiddlewareBeforeField[Ctx]) => (m.beforeField(mv.asInstanceOf[m.QueryVal], middlewareCtx, ctx), mv, m) } - val beforeAction = mBefore.collect{case (BeforeFieldResult(_, Some(action), _), _, _) ⇒ action}.lastOption - val beforeAttachments = mBefore.collect{case (BeforeFieldResult(_, _, Some(att)), _, _) ⇒ att}.toVector + val beforeAction = mBefore.collect{case (BeforeFieldResult(_, Some(action), _), _, _) => 
action}.lastOption + val beforeAttachments = mBefore.collect{case (BeforeFieldResult(_, _, Some(att)), _, _) => att}.toVector val updatedCtx = if (beforeAttachments.nonEmpty) ctx.copy(middlewareAttachments = beforeAttachments) else ctx @@ -1003,39 +1003,39 @@ class Resolver[Ctx]( def doAfterMiddleware[Val](v: Val): Val = mAfter.foldLeft(v) { - case (acc, (BeforeFieldResult(cv, _, _), mv, m: MiddlewareAfterField[Ctx])) ⇒ + case (acc, (BeforeFieldResult(cv, _, _), mv, m: MiddlewareAfterField[Ctx])) => m.afterField(mv.asInstanceOf[m.QueryVal], cv.asInstanceOf[m.FieldVal], acc, middlewareCtx, updatedCtx).asInstanceOf[Option[Val]] getOrElse acc - case (acc, _) ⇒ acc + case (acc, _) => acc } def doErrorMiddleware(error: Throwable): Unit = mError.collect { - case (BeforeFieldResult(cv, _, _), mv, m: MiddlewareErrorField[Ctx]) ⇒ + case (BeforeFieldResult(cv, _, _), mv, m: MiddlewareErrorField[Ctx]) => m.fieldError(mv.asInstanceOf[m.QueryVal], cv.asInstanceOf[m.FieldVal], error, middlewareCtx, updatedCtx) } - def doAfterMiddlewareWithMap[Val, NewVal](fn: Val ⇒ NewVal)(v: Val): NewVal = + def doAfterMiddlewareWithMap[Val, NewVal](fn: Val => NewVal)(v: Val): NewVal = mAfter.foldLeft(fn(v)) { - case (acc, (BeforeFieldResult(cv, _, _), mv, m: MiddlewareAfterField[Ctx])) ⇒ + case (acc, (BeforeFieldResult(cv, _, _), mv, m: MiddlewareAfterField[Ctx])) => m.afterField(mv.asInstanceOf[m.QueryVal], cv.asInstanceOf[m.FieldVal], acc, middlewareCtx, updatedCtx).asInstanceOf[Option[NewVal]] getOrElse acc - case (acc, _) ⇒ acc + case (acc, _) => acc } try { val res = beforeAction match { - case Some(action) ⇒ action - case None ⇒ + case Some(action) => action + case None => field.resolve match { - case pfn: Projector[Ctx, _, _] ⇒ pfn(updatedCtx, collectProjections(path, field, astFields, pfn.maxLevel)) - case fn ⇒ fn(updatedCtx) + case pfn: Projector[Ctx, _, _] => pfn(updatedCtx, collectProjections(path, field, astFields, pfn.maxLevel)) + case fn => fn(updatedCtx) } } def 
createResolution(result: Any): StandardFieldResolution = result match { // these specific cases are important for time measuring middleware and eager values - case resolved: Value[Ctx, Any @unchecked] ⇒ + case resolved: Value[Ctx, Any @unchecked] => StandardFieldResolution( errors, if (mAfter.nonEmpty) @@ -1044,7 +1044,7 @@ class Resolver[Ctx]( resolved, None) - case resolved: PartialValue[Ctx, Any @unchecked] ⇒ + case resolved: PartialValue[Ctx, Any @unchecked] => StandardFieldResolution( errors, if (mAfter.nonEmpty) @@ -1053,12 +1053,12 @@ class Resolver[Ctx]( resolved, if (mError.nonEmpty) Some(MappedCtxUpdate( - _ ⇒ userCtx, + _ => userCtx, identity, doErrorMiddleware)) else None) - case resolved: TryValue[Ctx, Any @unchecked] ⇒ + case resolved: TryValue[Ctx, Any @unchecked] => StandardFieldResolution( errors, if (mAfter.nonEmpty && resolved.value.isSuccess) @@ -1067,43 +1067,43 @@ class Resolver[Ctx]( resolved, if (mError.nonEmpty) Some(MappedCtxUpdate( - _ ⇒ userCtx, + _ => userCtx, identity, doErrorMiddleware)) else None) - case res: SequenceLeafAction[Ctx, _] ⇒ + case res: SequenceLeafAction[Ctx, _] => StandardFieldResolution( errors, res, Some(MappedCtxUpdate( - _ ⇒ userCtx, + _ => userCtx, if (mAfter.nonEmpty) doAfterMiddleware else identity, if (mError.nonEmpty) doErrorMiddleware else identity))) - case res: MappedSequenceLeafAction[Ctx, Any @unchecked, Any @unchecked] ⇒ - val mapFn = res.mapFn.asInstanceOf[Any ⇒ Any] + case res: MappedSequenceLeafAction[Ctx, Any @unchecked, Any @unchecked] => + val mapFn = res.mapFn.asInstanceOf[Any => Any] StandardFieldResolution( errors, res.action, Some(MappedCtxUpdate( - _ ⇒ userCtx, + _ => userCtx, if (mAfter.nonEmpty) doAfterMiddlewareWithMap(mapFn) else mapFn, if (mError.nonEmpty) doErrorMiddleware else identity))) - case resolved: LeafAction[Ctx, Any @unchecked] ⇒ + case resolved: LeafAction[Ctx, Any @unchecked] => StandardFieldResolution( errors, resolved, if (mAfter.nonEmpty || mError.nonEmpty) 
Some(MappedCtxUpdate( - _ ⇒ userCtx, + _ => userCtx, if (mAfter.nonEmpty) doAfterMiddleware else identity, if (mError.nonEmpty) doErrorMiddleware else identity)) else None) - case res: UpdateCtx[Ctx, Any @unchecked] ⇒ + case res: UpdateCtx[Ctx, Any @unchecked] => StandardFieldResolution( errors, res.action, @@ -1112,7 +1112,7 @@ class Resolver[Ctx]( if (mAfter.nonEmpty) doAfterMiddleware else identity, if (mError.nonEmpty) doErrorMiddleware else identity))) - case res: MappedUpdateCtx[Ctx, Any @unchecked, Any @unchecked] ⇒ + case res: MappedUpdateCtx[Ctx, Any @unchecked, Any @unchecked] => StandardFieldResolution( errors, res.action, @@ -1123,23 +1123,23 @@ class Resolver[Ctx]( } res match { - case s: SubscriptionValue[Ctx, _, _] ⇒ StreamFieldResolution(errors, s, createResolution) - case _ ⇒ createResolution(res) + case s: SubscriptionValue[Ctx, _, _] => StreamFieldResolution(errors, s, createResolution) + case _ => createResolution(res) } } catch { - case NonFatal(e) ⇒ + case NonFatal(e) => try { if (mError.nonEmpty) doErrorMiddleware(e) ErrorFieldResolution(errors.add(path, e, astField.location)) } catch { - case NonFatal(me) ⇒ ErrorFieldResolution(errors.add(path, e, astField.location).add(path, me, astField.location)) + case NonFatal(me) => ErrorFieldResolution(errors.add(path, e, astField.location).add(path, me, astField.location)) } } } catch { - case NonFatal(e) ⇒ ErrorFieldResolution(errors.add(path, e, astField.location)) + case NonFatal(e) => ErrorFieldResolution(errors.add(path, e, astField.location)) } - case Failure(error) ⇒ ErrorFieldResolution(errors.add(path, error)) + case Failure(error) => ErrorFieldResolution(errors.add(path, error)) } } } @@ -1148,34 +1148,34 @@ class Resolver[Ctx]( def loop(path: ExecutionPath, tpe: OutputType[_], astFields: Vector[ast.Field], currLevel: Int): Vector[ProjectedName] = if (currLevel > maxLevel) Vector.empty else tpe match { - case OptionType(ofType) ⇒ loop(path, ofType, astFields, currLevel) - case 
ListType(ofType) ⇒ loop(path, ofType, astFields, currLevel) - case objTpe: ObjectType[Ctx, _] ⇒ + case OptionType(ofType) => loop(path, ofType, astFields, currLevel) + case ListType(ofType) => loop(path, ofType, astFields, currLevel) + case objTpe: ObjectType[Ctx, _] => fieldCollector.collectFields(path, objTpe, astFields) match { - case Success(ff) ⇒ + case Success(ff) => ff.fields .collect { - case CollectedField(_, _, Success(fields)) if objTpe.getField(schema, fields.head.name).nonEmpty && !objTpe.getField(schema, fields.head.name).head.tags.contains(ProjectionExclude) ⇒ + case CollectedField(_, _, Success(fields)) if objTpe.getField(schema, fields.head.name).nonEmpty && !objTpe.getField(schema, fields.head.name).head.tags.contains(ProjectionExclude) => val astField = fields.head val field = objTpe.getField(schema, astField.name).head - val projectionNames = field.tags collect {case ProjectionName(name) ⇒ name} + val projectionNames = field.tags collect {case ProjectionName(name) => name} val projectedName = if (projectionNames.nonEmpty) projectionNames.toVector else Vector(field.name) - projectedName.map (name ⇒ + projectedName.map (name => ProjectedName(name, loop(path.add(astField, objTpe), field.fieldType, fields, currLevel + 1))) } .flatten - case Failure(_) ⇒ Vector.empty + case Failure(_) => Vector.empty } - case abst: AbstractType ⇒ + case abst: AbstractType => schema.possibleTypes .get (abst.name) .map (_.flatMap(loop(path, _, astFields, currLevel + 1)).groupBy(_.name).map(_._2.head).toVector) .getOrElse (Vector.empty) - case _ ⇒ Vector.empty + case _ => Vector.empty } loop(path, field.fieldType, astFields, 1) @@ -1211,9 +1211,9 @@ class Resolver[Ctx]( updatedErrors.add(other.errors), value = if (optional && other.value.isEmpty) - value map (v ⇒ marshaller.addMapNodeElem(v.asInstanceOf[marshaller.MapBuilder], key, marshaller.nullNode, optional = false)) + value map (v => marshaller.addMapNodeElem(v.asInstanceOf[marshaller.MapBuilder], key, 
marshaller.nullNode, optional = false)) else - for {myVal ← value; otherVal ← other.value} yield marshaller.addMapNodeElem(myVal.asInstanceOf[marshaller.MapBuilder], key, otherVal.asInstanceOf[marshaller.Node], optional = false)) + for {myVal <- value; otherVal <- other.value} yield marshaller.addMapNodeElem(myVal.asInstanceOf[marshaller.MapBuilder], key, otherVal.asInstanceOf[marshaller.Node], optional = false)) def nodeValue = value.asInstanceOf[Option[marshaller.Node]] def builderValue = value.asInstanceOf[Option[marshaller.MapBuilder]] @@ -1229,12 +1229,12 @@ class Resolver[Ctx]( ChildDeferredContext(Promise[Vector[Future[Vector[Defer]]]]())) def init(): Unit = - Future.sequence(children.map(_.promise.future)).onComplete { res ⇒ + Future.sequence(children.map(_.promise.future)).onComplete { res => val allDeferred = res.get.flatten if (allDeferred.nonEmpty) - resolveDeferredWithGrouping(allDeferred).foreach(groups ⇒ - groups.foreach(group ⇒ resolveDeferred(uc, group))) + resolveDeferredWithGrouping(allDeferred).foreach(groups => + groups.foreach(group => resolveDeferred(uc, group))) } } @@ -1257,7 +1257,7 @@ class Resolver[Ctx]( sealed trait FieldResolution case class ErrorFieldResolution(errors: ErrorRegistry) extends FieldResolution case class StandardFieldResolution(errors: ErrorRegistry, action: LeafAction[Ctx, Any], ctxUpdate: Option[MappedCtxUpdate[Ctx, Any, Any]]) extends FieldResolution - case class StreamFieldResolution[Val, S[_]](errors: ErrorRegistry, value: SubscriptionValue[Ctx, Val, S], standardResolution: Any ⇒ StandardFieldResolution) extends FieldResolution + case class StreamFieldResolution[Val, S[_]](errors: ErrorRegistry, value: SubscriptionValue[Ctx, Val, S], standardResolution: Any => StandardFieldResolution) extends FieldResolution case class SeqRes(value: Future[SeqFutRes], defer: Defer, deferFut: Future[Vector[Defer]]) @@ -1274,7 +1274,7 @@ class Resolver[Ctx]( case class SeqFutRes(value: Any = null, errors: Vector[Throwable] = 
Vector.empty, dctx: ChildDeferredContext = null) } -case class MappedCtxUpdate[Ctx, Val, NewVal](ctxFn: Val ⇒ Ctx, mapFn: Val ⇒ NewVal, onError: Throwable ⇒ Unit) +case class MappedCtxUpdate[Ctx, Val, NewVal](ctxFn: Val => Ctx, mapFn: Val => NewVal, onError: Throwable => Unit) object Resolver { val DefaultComplexity = 1.0D @@ -1284,23 +1284,23 @@ object Resolver { def marshalScalarValue(value: Any, marshaller: ResultMarshaller, typeName: String, scalarInfo: Set[ScalarValueInfo]): marshaller.Node = value match { - case astValue: ast.Value ⇒ marshalAstValue(astValue, marshaller, typeName, scalarInfo) - case null ⇒ marshaller.nullNode - case v ⇒ marshaller.scalarNode(value, typeName, scalarInfo) + case astValue: ast.Value => marshalAstValue(astValue, marshaller, typeName, scalarInfo) + case null => marshaller.nullNode + case v => marshaller.scalarNode(value, typeName, scalarInfo) } def marshalAstValue(value: ast.Value, marshaller: ResultMarshaller, typeName: String, scalarInfo: Set[ScalarValueInfo]): marshaller.Node = value match { - case ast.StringValue(str, _, _, _, _) ⇒ marshaller.scalarNode(str, typeName, scalarInfo) - case ast.IntValue(i, _, _) ⇒ marshaller.scalarNode(i, typeName, scalarInfo) - case ast.BigIntValue(i, _, _) ⇒ marshaller.scalarNode(i, typeName, scalarInfo) - case ast.FloatValue(f, _, _) ⇒ marshaller.scalarNode(f, typeName, scalarInfo) - case ast.BigDecimalValue(f, _, _) ⇒ marshaller.scalarNode(f, typeName, scalarInfo) - case ast.BooleanValue(b, _, _) ⇒ marshaller.scalarNode(b, typeName, scalarInfo) - case ast.NullValue(_, _) ⇒ marshaller.nullNode - case ast.EnumValue(enum, _, _) ⇒ marshaller.enumNode(enum, typeName) - case ast.ListValue(values, _, _) ⇒ marshaller.arrayNode(values map (marshalAstValue(_, marshaller, typeName, scalarInfo))) - case ast.ObjectValue(values, _, _) ⇒ marshaller.mapNode(values map (v ⇒ v.name → marshalAstValue(v.value, marshaller, typeName, scalarInfo))) - case ast.VariableValue(name, _, _) ⇒ marshaller.enumNode(name, 
typeName) + case ast.StringValue(str, _, _, _, _) => marshaller.scalarNode(str, typeName, scalarInfo) + case ast.IntValue(i, _, _) => marshaller.scalarNode(i, typeName, scalarInfo) + case ast.BigIntValue(i, _, _) => marshaller.scalarNode(i, typeName, scalarInfo) + case ast.FloatValue(f, _, _) => marshaller.scalarNode(f, typeName, scalarInfo) + case ast.BigDecimalValue(f, _, _) => marshaller.scalarNode(f, typeName, scalarInfo) + case ast.BooleanValue(b, _, _) => marshaller.scalarNode(b, typeName, scalarInfo) + case ast.NullValue(_, _) => marshaller.nullNode + case ast.EnumValue(enum, _, _) => marshaller.enumNode(enum, typeName) + case ast.ListValue(values, _, _) => marshaller.arrayNode(values map (marshalAstValue(_, marshaller, typeName, scalarInfo))) + case ast.ObjectValue(values, _, _) => marshaller.mapNode(values map (v => v.name -> marshalAstValue(v.value, marshaller, typeName, scalarInfo))) + case ast.VariableValue(name, _, _) => marshaller.enumNode(name, typeName) } } @@ -1310,4 +1310,4 @@ trait DeferredWithInfo { def field: Field[_, _] def astFields: Vector[ast.Field] def args: Args -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/execution/ResultResolver.scala b/src/main/scala/sangria/execution/ResultResolver.scala index 68de6049..d77ff817 100644 --- a/src/main/scala/sangria/execution/ResultResolver.scala +++ b/src/main/scala/sangria/execution/ResultResolver.scala @@ -18,18 +18,18 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti val empty = marshaller.emptyMapNode(names) val withData = data match { - case Some(d) ⇒ marshaller.addMapNodeElem(empty, "data", d, optional = false) - case None ⇒ marshaller.addMapNodeElem(empty, "data", marshaller.nullNode, optional = false) + case Some(d) => marshaller.addMapNodeElem(empty, "data", d, optional = false) + case None => marshaller.addMapNodeElem(empty, "data", marshaller.nullNode, optional = false) } val withErrors = errors match { - case Some(e) ⇒ 
marshaller.addMapNodeElem(withData, "errors", e, optional = false) - case None ⇒ withData + case Some(e) => marshaller.addMapNodeElem(withData, "errors", e, optional = false) + case None => withData } val withExtensions = extensions match { - case Some(e) ⇒ marshaller.addMapNodeElem(withErrors, "extensions", e, optional = true) - case None ⇒ withErrors + case Some(e) => marshaller.addMapNodeElem(withErrors, "extensions", e, optional = true) + case None => withErrors } marshaller.mapNode(withExtensions) @@ -40,7 +40,7 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti def handleSupportedError(path: ExecutionPath, handledException: HandledException, locations: List[AstLocation]) = { handledException match { - case SingleHandledException(message, additionalFields, newLocations, addFieldsInExtensions, addFieldsInError) ⇒ + case SingleHandledException(message, additionalFields, newLocations, addFieldsInExtensions, addFieldsInError) => val msg = if (message == null) "" else message val af = additionalFields.toSeq.asInstanceOf[Seq[(String, marshaller.Node)]] @@ -48,8 +48,8 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti errorNode(msg, path, locations ++ newLocations, if (addFieldsInError) af else Seq.empty, if (addFieldsInExtensions) af else Seq.empty)) - case MultipleHandledExceptions(messages, addFieldsInExtensions, addFieldsInError) ⇒ - messages.map { case (message, additionalFields, newLocations) ⇒ + case MultipleHandledExceptions(messages, addFieldsInExtensions, addFieldsInError) => + messages.map { case (message, additionalFields, newLocations) => val msg = if (message == null) "" else message val af = additionalFields.toSeq.asInstanceOf[Seq[(String, marshaller.Node)]] @@ -62,19 +62,19 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti private def getLocations(violation: Violation): List[AstLocation] = violation match { - case v: AstNodeLocation if 
v.locations.nonEmpty ⇒ v.locations - case _ ⇒ Nil + case v: AstNodeLocation if v.locations.nonEmpty => v.locations + case _ => Nil } private def getLocations(error: Throwable): List[AstLocation] = error match { - case error: AstNodeLocation if error.locations.nonEmpty ⇒ error.locations - case _ ⇒ Nil + case error: AstNodeLocation if error.locations.nonEmpty => error.locations + case _ => Nil } private def createLocation(loc: AstLocation) = marshaller.mapNode(Seq( - "line" → marshaller.scalarNode(loc.line, "Int", Set.empty), - "column" → marshaller.scalarNode(loc.column, "Int", Set.empty))) + "line" -> marshaller.scalarNode(loc.line, "Int", Set.empty), + "column" -> marshaller.scalarNode(loc.column, "Int", Set.empty))) private def errorNode(message: String, path: ExecutionPath, positions: List[AstLocation], additionalFields: Seq[(String, marshaller.Node)] = Nil, additionalExtensionFields: Seq[(String, marshaller.Node)] = Nil): marshaller.Node = mapNode( @@ -82,25 +82,25 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti pathFields(path) ++ positionFields(positions) ++ additionalFields ++ - (if (additionalExtensionFields.nonEmpty) Seq("extensions" → mapNode(additionalExtensionFields)) else Seq.empty)) + (if (additionalExtensionFields.nonEmpty) Seq("extensions" -> mapNode(additionalExtensionFields)) else Seq.empty)) private def mapNode(fields: Seq[(String, marshaller.Node)]): marshaller.Node = marshaller.mapNode(fields.foldLeft(marshaller.emptyMapNode(fields.map(_._1))) { - case (acc, (name, value)) ⇒ marshaller.addMapNodeElem(acc, name, value, optional = false) + case (acc, (name, value)) => marshaller.addMapNodeElem(acc, name, value, optional = false) }) private def messageFields(message: String): Seq[(String, marshaller.Node)] = - Seq("message" → marshaller.scalarNode(message, "String", Set.empty)) + Seq("message" -> marshaller.scalarNode(message, "String", Set.empty)) private def pathFields(path: ExecutionPath): Seq[(String, 
marshaller.Node)] = if (path.nonEmpty) - Seq("path" → path.marshal(marshaller)) + Seq("path" -> path.marshal(marshaller)) else Seq.empty private def positionFields(positions: List[AstLocation]): Seq[(String, marshaller.Node)] = if (positions.nonEmpty) - Seq("locations" → marshallPositions(positions)) + Seq("locations" -> marshallPositions(positions)) else Seq.empty @@ -118,8 +118,8 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti def append(path: ExecutionPath, errors: Vector[Throwable], position: Option[AstLocation]) = copy( - errors.flatMap(e ⇒ createErrorPaths(path, e, position)) ++ errorList, - if (preserveOriginalErrors) errors.map(e ⇒ RegisteredError(path, e, position)) ++ originalErrors else originalErrors) + errors.flatMap(e => createErrorPaths(path, e, position)) ++ errorList, + if (preserveOriginalErrors) errors.map(e => RegisteredError(path, e, position)) ++ originalErrors else originalErrors) def add(path: ExecutionPath, error: Throwable, position: Option[AstLocation]) = copy( @@ -133,27 +133,27 @@ class ResultResolver(val marshaller: ResultMarshaller, exceptionHandler: Excepti private def createErrorPaths(path: ExecutionPath, error: Throwable, position: Option[AstLocation]) = error match { - case e: WithViolations if e.violations.nonEmpty ⇒ + case e: WithViolations if e.violations.nonEmpty => e.violations flatMap { - case v if exceptionHandler.onViolation.isDefinedAt(marshaller → v) ⇒ - handleSupportedError(path, exceptionHandler.onViolation(marshaller → v), position.toList ++ getLocations(v)) - case v ⇒ + case v if exceptionHandler.onViolation.isDefinedAt(marshaller -> v) => + handleSupportedError(path, exceptionHandler.onViolation(marshaller -> v), position.toList ++ getLocations(v)) + case v => Vector(errorNode(v.errorMessage, path, position.toList ++ getLocations(v))) } - case e: UserFacingError if exceptionHandler.onUserFacingError isDefinedAt (marshaller → e) ⇒ - handleSupportedError(path, 
exceptionHandler.onUserFacingError(marshaller → e), position.toList ++ getLocations(e)) + case e: UserFacingError if exceptionHandler.onUserFacingError isDefinedAt (marshaller -> e) => + handleSupportedError(path, exceptionHandler.onUserFacingError(marshaller -> e), position.toList ++ getLocations(e)) - case e: UserFacingError ⇒ + case e: UserFacingError => Vector(errorNode(e.getMessage, path, position.toList ++ getLocations(e))) - case e if exceptionHandler.onException isDefinedAt (marshaller → e) ⇒ - handleSupportedError(path, exceptionHandler.onException(marshaller → e), position.toList ++ getLocations(e)) + case e if exceptionHandler.onException isDefinedAt (marshaller -> e) => + handleSupportedError(path, exceptionHandler.onException(marshaller -> e), position.toList ++ getLocations(e)) - case QueryReducingError(cause, _) if exceptionHandler.onException isDefinedAt (marshaller → cause) ⇒ - handleSupportedError(path, exceptionHandler.onException(marshaller → cause), position.toList ++ getLocations(cause)) + case QueryReducingError(cause, _) if exceptionHandler.onException isDefinedAt (marshaller -> cause) => + handleSupportedError(path, exceptionHandler.onException(marshaller -> cause), position.toList ++ getLocations(cause)) - case e ⇒ + case e => e.printStackTrace() Vector(errorNode("Internal server error", path, position.toList ++ getLocations(e))) @@ -176,14 +176,14 @@ object ResultResolver { implicit val m = SimpleResultMarshallerForType[marshaller.Node](marshaller) val res = new mutable.LinkedHashMap[String, marshaller.Node] - extensions.foreach { e ⇒ + extensions.foreach { e => val eAny = e.asInstanceOf[Extension[Any]] implicit val iu = eAny.iu if (iu.isMapNode(eAny.data)) { - iu.getMapKeys(e.data).map { key ⇒ - iu.getMapValue(e.data, key).foreach { value ⇒ + iu.getMapKeys(e.data).map { key => + iu.getMapValue(e.data, key).foreach { value => res(key) = value.convertMarshaled[marshaller.Node] } } diff --git 
a/src/main/scala/sangria/execution/SimpleAstBasedExtensionMiddleware.scala b/src/main/scala/sangria/execution/SimpleAstBasedExtensionMiddleware.scala index bc1cb3bc..30dccc76 100644 --- a/src/main/scala/sangria/execution/SimpleAstBasedExtensionMiddleware.scala +++ b/src/main/scala/sangria/execution/SimpleAstBasedExtensionMiddleware.scala @@ -2,7 +2,7 @@ package sangria.execution import sangria.ast -class SimpleAstBasedExtensionMiddleware[Ctx](extensionFn: MiddlewareQueryContext[Ctx, _, _] ⇒ ast.Value) extends Middleware[Ctx] with MiddlewareExtension[Ctx] { +class SimpleAstBasedExtensionMiddleware[Ctx](extensionFn: MiddlewareQueryContext[Ctx, _, _] => ast.Value) extends Middleware[Ctx] with MiddlewareExtension[Ctx] { override type QueryVal = Unit override def beforeQuery(context: MiddlewareQueryContext[Ctx, _, _]) = () diff --git a/src/main/scala/sangria/execution/TimeMeasurement.scala b/src/main/scala/sangria/execution/TimeMeasurement.scala index d9152437..6bb5c768 100644 --- a/src/main/scala/sangria/execution/TimeMeasurement.scala +++ b/src/main/scala/sangria/execution/TimeMeasurement.scala @@ -3,10 +3,10 @@ package sangria.execution case class TimeMeasurement(startMs: Long, endMs: Long, durationNanos: Long) object TimeMeasurement { - def measure[T](fn: ⇒ T): (T, TimeMeasurement) = { + def measure[T](fn: => T): (T, TimeMeasurement) = { val sw = StopWatch.start() val res = fn - res → sw.stop + res -> sw.stop } def empty = { diff --git a/src/main/scala/sangria/execution/ValueCoercionHelper.scala b/src/main/scala/sangria/execution/ValueCoercionHelper.scala index da12a791..5cf10c8f 100644 --- a/src/main/scala/sangria/execution/ValueCoercionHelper.scala +++ b/src/main/scala/sangria/execution/ValueCoercionHelper.scala @@ -19,10 +19,10 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec fieldPath: List[String], marshaller: ResultMarshaller, pos: List[AstLocation] = Nil)(value: Either[Vector[Violation], Trinary[Any]]): 
Either[Vector[Violation], marshaller.Node] = value match { - case Right(v) if ofType.isOptional ⇒ Right(marshaller.optionalArrayNodeValue(v.asInstanceOf[Trinary[marshaller.Node]].toOption)) - case Right(Trinary.Defined(v)) ⇒ Right(v.asInstanceOf[marshaller.Node]) - case Right(Trinary.Undefined) | Right(Trinary.Null) ⇒ Left(Vector(NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(ofType), sourceMapper, pos))) - case Left(violations) ⇒ Left(violations) + case Right(v) if ofType.isOptional => Right(marshaller.optionalArrayNodeValue(v.asInstanceOf[Trinary[marshaller.Node]].toOption)) + case Right(Trinary.Defined(v)) => Right(v.asInstanceOf[marshaller.Node]) + case Right(Trinary.Undefined) | Right(Trinary.Null) => Left(Vector(NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(ofType), sourceMapper, pos))) + case Left(violations) => Left(violations) } def resolveMapValue( @@ -36,21 +36,21 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec errors: VectorBuilder[Violation], pos: List[AstLocation] = Nil, isArgument: Boolean, - fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]], + fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]], allowErrorsOnDefault: Boolean = false, - valueMap: Nothing ⇒ Any = defaultValueMapFn, + valueMap: Nothing => Any = defaultValueMapFn, defaultValueInfo: Option[Cache[String, Any]] = None, undefinedValues: Option[VectorBuilder[String]] = None )( acc: marshaller.MapBuilder, value: Option[Either[Vector[Violation], Trinary[marshaller.Node]]] ): marshaller.MapBuilder = { - val valueMapTyped = valueMap.asInstanceOf[Any ⇒ marshaller.Node] + val valueMapTyped = valueMap.asInstanceOf[Any => marshaller.Node] def locations = inputFor match { - case Some(n) if n.location.isDefined && !pos.contains(n.location.get) ⇒ n.location.get +: pos - case _ ⇒ pos + case Some(n) if n.location.isDefined && 
!pos.contains(n.location.get) => n.location.get +: pos + case _ => pos } @@ -63,74 +63,74 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec def getDefault = getCoercedDefault match { - case Right(Trinary.Defined(v)) ⇒ + case Right(Trinary.Defined(v)) => marshaller.addMapNodeElem(acc, fieldName, valueMapTyped(v), optional = ofType.isOptional) - case Right(Trinary.Undefined) | Right(Trinary.Null) | Right(_: Trinary.NullWithDefault[_]) ⇒ + case Right(Trinary.Undefined) | Right(Trinary.Null) | Right(_: Trinary.NullWithDefault[_]) => acc - case Left(violations) ⇒ + case Left(violations) => errors ++= violations acc } def updateDefaultInfo() = defaultValueInfo match { - case Some(dvi) if default.isDefined ⇒ + case Some(dvi) if default.isDefined => getCoercedDefault match { - case Right(Trinary.Defined(v)) ⇒ + case Right(Trinary.Defined(v)) => dvi(fieldName) = valueMapTyped(v) - case _ ⇒ // do nothing + case _ => // do nothing } - case _ ⇒ // do nothing + case _ => // do nothing } def updateDefaultInfoWithValue(v: Any) = defaultValueInfo match { - case Some(dvi) if default.isDefined ⇒ + case Some(dvi) if default.isDefined => dvi(fieldName) = valueMapTyped(v) - case _ ⇒ // do nothing + case _ => // do nothing } def updateUndefined() = undefinedValues match { - case Some(u) ⇒ + case Some(u) => u += fieldName - case _ ⇒ // do nothing + case _ => // do nothing } value match { - case None if default.isDefined ⇒ + case None if default.isDefined => updateUndefined() getDefault - case None if ofType.isOptional ⇒ + case None if ofType.isOptional => updateUndefined() acc - case None ⇒ + case None => updateUndefined() errors += NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(ofType), sourceMapper, locations) acc - case Some(Right(Trinary.Null)) if ofType.isOptional ⇒ + case Some(Right(Trinary.Null)) if ofType.isOptional => updateDefaultInfo() marshaller.addMapNodeElem(acc, fieldName, marshaller.nullNode, optional = true) - 
case Some(Right(Trinary.NullWithDefault(v))) if ofType.isOptional ⇒ + case Some(Right(Trinary.NullWithDefault(v))) if ofType.isOptional => updateDefaultInfoWithValue(v) marshaller.addMapNodeElem(acc, fieldName, marshaller.nullNode, optional = true) - case Some(Right(Trinary.Undefined)) if default.isDefined ⇒ + case Some(Right(Trinary.Undefined)) if default.isDefined => updateUndefined() getDefault - case Some(Right(Trinary.Undefined)) if ofType.isOptional ⇒ + case Some(Right(Trinary.Undefined)) if ofType.isOptional => updateUndefined() acc - case Some(Right(Trinary.Null)) | Some(Right(Trinary.Undefined)) | Some(Right(Trinary.NullWithDefault(_))) ⇒ + case Some(Right(Trinary.Null)) | Some(Right(Trinary.Undefined)) | Some(Right(Trinary.NullWithDefault(_))) => updateUndefined() errors += NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(ofType), sourceMapper, locations) acc - case Some(Right(Trinary.Defined(v))) ⇒ + case Some(Right(Trinary.Defined(v))) => marshaller.addMapNodeElem(acc, fieldName, valueMapTyped(v), ofType.isOptional) - case Some(Left(_)) if allowErrorsOnDefault && default.isDefined ⇒ + case Some(Left(_)) if allowErrorsOnDefault && default.isDefined => getDefault - case Some(Left(violations)) ⇒ + case Some(Left(violations)) => errors ++= violations acc } @@ -145,9 +145,9 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec marshaller: ResultMarshaller, firstKindMarshaller: ResultMarshaller, isArgument: Boolean, - errorPrefix: ⇒ String = "", + errorPrefix: => String = "", nullWithDefault: Boolean = false, - fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]] = None + fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]] = None )(implicit iu: InputUnmarshaller[In]): Either[Vector[Violation], Trinary[marshaller.Node]] = { def defined(node: marshaller.Node): Trinary[marshaller.Node] = if (nullWithDefault) Trinary.NullWithDefault(node) @@ 
-172,86 +172,86 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec isArgument ))) - def resolveSuccessfulCoercedScalar(v: Any, outFn: Any ⇒ Any, scalar: ScalarType[Any], value: In) = { + def resolveSuccessfulCoercedScalar(v: Any, outFn: Any => Any, scalar: ScalarType[Any], value: In) = { val prepared = firstKindMarshaller match { - case raw: RawResultMarshaller ⇒ raw.rawScalarNode(v) - case standard ⇒ Resolver.marshalScalarValue(scalar.coerceOutput(outFn(v), standard.capabilities), standard, scalar.name, scalar.scalarInfo) + case raw: RawResultMarshaller => raw.rawScalarNode(v) + case standard => Resolver.marshalScalarValue(scalar.coerceOutput(outFn(v), standard.capabilities), standard, scalar.name, scalar.scalarInfo) } Right(defined(prepared.asInstanceOf[marshaller.Node])) } - def resolveCoercedScalar(coerced: Either[Violation, Any], outFn: Any ⇒ Any, scalar: ScalarType[Any], actualType: InputType[_], value: In) = + def resolveCoercedScalar(coerced: Either[Violation, Any], outFn: Any => Any, scalar: ScalarType[Any], actualType: InputType[_], value: In) = coerced.fold( - violation ⇒ Left(Vector(FieldCoercionViolation(fieldPath, violation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))), - v ⇒ { + violation => Left(Vector(FieldCoercionViolation(fieldPath, violation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))), + v => { fromScalarMiddleware match { - case Some(fn) ⇒ fn(v, actualType) match { - case Some(Left(violation)) ⇒ + case Some(fn) => fn(v, actualType) match { + case Some(Left(violation)) => Left(Vector(FieldCoercionViolation(fieldPath, violation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))) - case Some(Right(newv)) ⇒ + case Some(Right(newv)) => resolveSuccessfulCoercedScalar(newv, outFn, scalar, value) - case None ⇒ + case None => resolveSuccessfulCoercedScalar(v, outFn, scalar, value) } - case None ⇒ resolveSuccessfulCoercedScalar(v, outFn, 
scalar, value) + case None => resolveSuccessfulCoercedScalar(v, outFn, scalar, value) } }) (tpe, input) match { - case (_, node) if iu.isVariableNode(node) ⇒ + case (_, node) if iu.isVariableNode(node) => val varName = iu.getVariableName(node) variables match { - case Some(vars) ⇒ + case Some(vars) => vars.get(varName) match { - case Some(vv) ⇒ + case Some(vv) => val res = vv.resolve(marshaller, firstKindMarshaller, tpe) match { - case resolved @ Right(_) ⇒ resolved.asInstanceOf[Either[Vector[Violation], Trinary[marshaller.Node]]] - case errors @ Left(_) ⇒ errors.asInstanceOf[Either[Vector[Violation], Trinary[marshaller.Node]]] + case resolved @ Right(_) => resolved.asInstanceOf[Either[Vector[Violation], Trinary[marshaller.Node]]] + case errors @ Left(_) => errors.asInstanceOf[Either[Vector[Violation], Trinary[marshaller.Node]]] } res - case None ⇒ + case None => Right(Trinary.Undefined) } - case None ⇒ + case None => Left(Vector(VariableNotAllowedViolation(varName, sourceMapper, Nil))) } - case (OptionInputType(ofType), value) if iu.isDefined(value) ⇒ + case (OptionInputType(ofType), value) if iu.isDefined(value) => coerceInputValue(ofType, fieldPath, value, inputFor, variables, marshaller, firstKindMarshaller, isArgument, errorPrefix, nullWithDefault, fromScalarMiddleware) - case (OptionInputType(ofType), value) ⇒ + case (OptionInputType(ofType), value) => Right(Trinary.Null) - case (ListInputType(ofType), values) if iu.isListNode(values) ⇒ + case (ListInputType(ofType), values) if iu.isListNode(values) => val res = iu.getListValue(values).toVector.map { - case defined if iu.isDefined(defined) ⇒ + case defined if iu.isDefined(defined) => resolveListValue(ofType, fieldPath, marshaller, valuePosition(inputFor, defined))( coerceInputValue(ofType, fieldPath, defined, inputFor, variables, firstKindMarshaller, firstKindMarshaller, isArgument, errorPrefix, nullWithDefault, fromScalarMiddleware)) - case v ⇒ + case v => resolveListValue(ofType, fieldPath, marshaller, 
valuePosition(inputFor, v, values))(Right(Trinary.Null)) } val (errors, successes) = res.partition(_.isLeft) - if (errors.nonEmpty) Left(errors.collect{case Left(es) ⇒ es}.flatten) - else Right(defined(marshaller.arrayNode(successes.collect {case Right(v) ⇒ v}))) + if (errors.nonEmpty) Left(errors.collect{case Left(es) => es}.flatten) + else Right(defined(marshaller.arrayNode(successes.collect {case Right(v) => v}))) - case (ListInputType(ofType), value) if iu.isDefined(value) ⇒ + case (ListInputType(ofType), value) if iu.isDefined(value) => val res = resolveListValue(ofType, fieldPath, marshaller, valuePosition(inputFor, value))( coerceInputValue(ofType, fieldPath, value, inputFor, variables, firstKindMarshaller, firstKindMarshaller, isArgument, errorPrefix, nullWithDefault, fromScalarMiddleware)) res match { - case Right(v) ⇒ Right(defined(marshaller.arrayNode(Vector(v)))) - case Left(violations) ⇒ Left(violations) + case Right(v) => Right(defined(marshaller.arrayNode(Vector(v)))) + case Left(violations) => Left(violations) } - case (lt @ ListInputType(ofType), value) ⇒ + case (lt @ ListInputType(ofType), value) => Left(Vector(FieldCoercionViolation( fieldPath, NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(lt), sourceMapper, valuePosition(inputFor, value)), @@ -260,17 +260,17 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec errorPrefix, isArgument))) - case (objTpe: InputObjectType[_], valueMap) if iu.isMapNode(valueMap) ⇒ + case (objTpe: InputObjectType[_], valueMap) if iu.isMapNode(valueMap) => val errors = new VectorBuilder[Violation] val res = objTpe.fields.foldLeft(firstKindMarshaller.emptyMapNode(objTpe.fields.map(_.name))) { - case (acc, field) ⇒ iu.getMapValue(valueMap, field.name) match { - case Some(defined) if iu.isDefined(defined) ⇒ + case (acc, field) => iu.getMapValue(valueMap, field.name) match { + case Some(defined) if iu.isDefined(defined) => resolveMapValue(field.fieldType, fieldPath 
:+ field.name, field.defaultValue, inputFor, field.name, firstKindMarshaller, firstKindMarshaller, errors, valuePosition(inputFor, defined), isArgument, fromScalarMiddleware)( acc, Some(coerceInputValue(field.fieldType, fieldPath :+ field.name, defined, inputFor, variables, firstKindMarshaller, firstKindMarshaller, false, errorPrefix, nullWithDefault, fromScalarMiddleware))) - case Some(defined) ⇒ + case Some(defined) => resolveMapValue(field.fieldType, fieldPath :+ field.name, field.defaultValue, inputFor, field.name, firstKindMarshaller, firstKindMarshaller, errors, valuePosition(inputFor, valueMap), isArgument, fromScalarMiddleware)(acc, Some(Right(Trinary.Null))) - case _ ⇒ + case _ => resolveMapValue(field.fieldType, fieldPath :+ field.name, field.defaultValue, inputFor, field.name, firstKindMarshaller, firstKindMarshaller, errors, valuePosition(inputFor, valueMap), isArgument, fromScalarMiddleware)(acc, None) } } @@ -280,10 +280,10 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec if (errorRes.nonEmpty) Left(errorRes) else Right(defined(firstKindMarshaller.mapNode(res).asInstanceOf[marshaller.Node])) - case (objTpe: InputObjectType[_], value) if iu.isDefined(value) ⇒ + case (objTpe: InputObjectType[_], value) if iu.isDefined(value) => Left(Vector(InputObjectTypeMismatchViolation(fieldPath, SchemaRenderer.renderTypeName(objTpe), iu.render(value), sourceMapper, valuePosition(inputFor, value)))) - case (objTpe: InputObjectType[_], value) ⇒ + case (objTpe: InputObjectType[_], value) => Left(Vector(FieldCoercionViolation( fieldPath, NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(objTpe), sourceMapper, valuePosition(inputFor, value)), @@ -292,61 +292,61 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec errorPrefix, isArgument))) - case (scalar: ScalarType[_], value) if iu.isScalarNode(value) ⇒ + case (scalar: ScalarType[_], value) if iu.isScalarNode(value) => val coerced 
= iu.getScalarValue(value) match { - case node: ast.Value ⇒ scalar.coerceInput(node) - case other ⇒ scalar.coerceUserInput(other) + case node: ast.Value => scalar.coerceInput(node) + case other => scalar.coerceUserInput(other) } resolveCoercedScalar(coerced, identity, scalar.asInstanceOf[ScalarType[Any]], scalar, value) - case (_: ScalarType[_], value) if iu.isDefined(value) ⇒ + case (_: ScalarType[_], value) if iu.isDefined(value) => invalidScalarViolation(value) - case (scalar: ScalarType[_], value) ⇒ + case (scalar: ScalarType[_], value) => nullScalarViolation(scalar, value) - case (scalar: ScalarAlias[Any, Any] @unchecked, value) if iu.isScalarNode(value) ⇒ + case (scalar: ScalarAlias[Any, Any] @unchecked, value) if iu.isScalarNode(value) => val coerced = iu.getScalarValue(value) match { - case node: ast.Value ⇒ scalar.aliasFor.coerceInput(node) - case other ⇒ scalar.aliasFor.coerceUserInput(other) + case node: ast.Value => scalar.aliasFor.coerceInput(node) + case other => scalar.aliasFor.coerceUserInput(other) } val fromAlias = coerced match { - case l: Left[Violation, Any] ⇒ l - case Right(v) ⇒ scalar.fromScalar(v) + case l: Left[Violation, Any] => l + case Right(v) => scalar.fromScalar(v) } resolveCoercedScalar(fromAlias, scalar.toScalar, scalar.aliasFor, scalar, value) - case (_: ScalarAlias[_, _], value) if iu.isDefined(value) ⇒ + case (_: ScalarAlias[_, _], value) if iu.isDefined(value) => invalidScalarViolation(value) - case (scalar: ScalarAlias[_, _], value) ⇒ + case (scalar: ScalarAlias[_, _], value) => nullScalarViolation(scalar.aliasFor, value) - case (enum: EnumType[_], value) if iu.isEnumNode(value) ⇒ + case (enum: EnumType[_], value) if iu.isEnumNode(value) => val coerced = iu.getScalarValue(value) match { - case node: ast.Value ⇒ enum.coerceInput(node) - case other ⇒ enum.coerceUserInput(other) + case node: ast.Value => enum.coerceInput(node) + case other => enum.coerceUserInput(other) } - coerced.fold(violation ⇒ 
Left(Vector(FieldCoercionViolation(fieldPath, violation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))), { - case (v, deprecated) ⇒ + coerced.fold(violation => Left(Vector(FieldCoercionViolation(fieldPath, violation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))), { + case (v, deprecated) => if (deprecated && userContext.isDefined) deprecationTracker.deprecatedEnumValueUsed(enum, v, userContext.get) val prepared = firstKindMarshaller match { - case raw: RawResultMarshaller ⇒ raw.rawScalarNode(v) - case standard ⇒ Resolver.marshalEnumValue(enum.coerceOutput(v), standard, enum.name) + case raw: RawResultMarshaller => raw.rawScalarNode(v) + case standard => Resolver.marshalEnumValue(enum.coerceOutput(v), standard, enum.name) } Right(defined(prepared.asInstanceOf[marshaller.Node])) }) - case (enum: EnumType[_], value) if iu.isDefined(value) ⇒ + case (enum: EnumType[_], value) if iu.isDefined(value) => Left(Vector(FieldCoercionViolation(fieldPath, EnumCoercionViolation, sourceMapper, valuePosition(inputFor, value), errorPrefix, isArgument))) - case (enum: EnumType[_], value) ⇒ + case (enum: EnumType[_], value) => Left(Vector(FieldCoercionViolation( fieldPath, NullValueForNotNullTypeViolation(fieldPath, SchemaRenderer.renderTypeName(enum), sourceMapper, valuePosition(inputFor, value)), @@ -359,93 +359,93 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec private def valuePosition[T](forNode: Option[ast.AstNode], value: T*): List[AstLocation] = { val values = value.view.collect { - case node: ast.AstNode if node.location.isDefined ⇒ node.location.toList + case node: ast.AstNode if node.location.isDefined => node.location.toList } val nodeLocations: List[AstLocation] = forNode match { - case Some(n) if n.location.isDefined ⇒ List(n.location.get) - case _ ⇒ Nil + case Some(n) if n.location.isDefined => List(n.location.get) + case _ => Nil } 
values.headOption.fold(nodeLocations)(nodeLocations ++ _) } def isValidValue[In](tpe: InputType[_], input: Option[In])(implicit um: InputUnmarshaller[In]): Vector[Violation] = (tpe, input) match { - case (OptionInputType(ofType), Some(value)) if um.isDefined(value) ⇒ isValidValue(ofType, Some(value)) - case (OptionInputType(_), _) ⇒ Vector.empty - case (_, None) ⇒ Vector(NotNullValueIsNullViolation(sourceMapper, Nil)) - - case (ListInputType(ofType), Some(values)) if um.isListNode(values) ⇒ - um.getListValue(values).toVector.flatMap(v ⇒ isValidValue(ofType, v match { - case opt: Option[In @unchecked] ⇒ opt - case other ⇒ Option(other) + case (OptionInputType(ofType), Some(value)) if um.isDefined(value) => isValidValue(ofType, Some(value)) + case (OptionInputType(_), _) => Vector.empty + case (_, None) => Vector(NotNullValueIsNullViolation(sourceMapper, Nil)) + + case (ListInputType(ofType), Some(values)) if um.isListNode(values) => + um.getListValue(values).toVector.flatMap(v => isValidValue(ofType, v match { + case opt: Option[In @unchecked] => opt + case other => Option(other) }) map (ListValueViolation(0, _, sourceMapper, Nil))) - case (ListInputType(ofType), Some(value)) if um.isDefined(value) ⇒ + case (ListInputType(ofType), Some(value)) if um.isDefined(value) => isValidValue(ofType, value match { - case opt: Option[In @unchecked] ⇒ opt - case other ⇒ Option(other) + case opt: Option[In @unchecked] => opt + case other => Option(other) }) map (ListValueViolation(0, _, sourceMapper, Nil)) - case (objTpe: InputObjectType[_], Some(valueMap)) if um.isMapNode(valueMap) ⇒ + case (objTpe: InputObjectType[_], Some(valueMap)) if um.isMapNode(valueMap) => val unknownFields = um.getMapKeys(valueMap).toVector.collect { - case f if !objTpe.fieldsByName.contains(f) ⇒ + case f if !objTpe.fieldsByName.contains(f) => UnknownInputObjectFieldViolation(SchemaRenderer.renderTypeName(objTpe, true), f, sourceMapper, Nil) } val fieldViolations = - objTpe.fields.toVector.flatMap(f ⇒ + 
objTpe.fields.toVector.flatMap(f => isValidValue(f.fieldType, um.getMapValue(valueMap, f.name)) map (MapValueViolation(f.name, _, sourceMapper, Nil))) fieldViolations ++ unknownFields - case (objTpe: InputObjectType[_], _) ⇒ + case (objTpe: InputObjectType[_], _) => Vector(InputObjectIsOfWrongTypeMissingViolation(SchemaRenderer.renderTypeName(objTpe, true), sourceMapper, Nil)) - case (scalar: ScalarType[_], Some(value)) if um.isScalarNode(value) ⇒ + case (scalar: ScalarType[_], Some(value)) if um.isScalarNode(value) => val coerced = um.getScalarValue(value) match { - case node: ast.Value ⇒ scalar.coerceInput(node) - case other ⇒ scalar.coerceUserInput(other) + case node: ast.Value => scalar.coerceInput(node) + case other => scalar.coerceUserInput(other) } coerced match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case _ => Vector.empty } - case (scalar: ScalarAlias[_, _], Some(value)) if um.isScalarNode(value) ⇒ + case (scalar: ScalarAlias[_, _], Some(value)) if um.isScalarNode(value) => val coerced = um.getScalarValue(value) match { - case node: ast.Value ⇒ scalar.aliasFor.coerceInput(node) - case other ⇒ scalar.aliasFor.coerceUserInput(other) + case node: ast.Value => scalar.aliasFor.coerceInput(node) + case other => scalar.aliasFor.coerceUserInput(other) } coerced match { - case Left(violation) ⇒ Vector(violation) - case Right(v) ⇒ scalar.fromScalar(v) match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case Right(v) => scalar.fromScalar(v) match { + case Left(violation) => Vector(violation) + case _ => Vector.empty } } - case (enum: EnumType[_], Some(value)) if um.isEnumNode(value) ⇒ + case (enum: EnumType[_], Some(value)) if um.isEnumNode(value) => val coerced = um.getScalarValue(value) match { - case node: ast.Value ⇒ enum.coerceInput(node) - case other ⇒ enum.coerceUserInput(other) + case node: ast.Value => 
enum.coerceInput(node) + case other => enum.coerceUserInput(other) } coerced match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case _ => Vector.empty } - case (enum: EnumType[_], Some(value)) ⇒ + case (enum: EnumType[_], Some(value)) => Vector(EnumCoercionViolation) - case _ ⇒ + case _ => Vector(GenericInvalidValueViolation(sourceMapper, Nil)) } - def getVariableValue[In](definition: ast.VariableDefinition, tpe: InputType[_], input: Option[In], fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]])(implicit um: InputUnmarshaller[In]): Either[Vector[Violation], Option[VariableValue]] = { + def getVariableValue[In](definition: ast.VariableDefinition, tpe: InputType[_], input: Option[In], fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]])(implicit um: InputUnmarshaller[In]): Either[Vector[Violation], Option[VariableValue]] = { val violations = isValidValue(tpe, input) if (violations.isEmpty) { @@ -455,54 +455,54 @@ class ValueCoercionHelper[Ctx](sourceMapper: Option[SourceMapper] = None, deprec import sangria.marshalling.queryAst.queryAstInputUnmarshaller definition.defaultValue match { - case Some(dv) ⇒ - Right(Some(VariableValue((marshaller, firstKindMarshaller, actualType) ⇒ + case Some(dv) => + Right(Some(VariableValue((marshaller, firstKindMarshaller, actualType) => coerceInputValue(actualType, fieldPath, dv, Some(definition), None, marshaller, firstKindMarshaller, nullWithDefault = input.nonEmpty, fromScalarMiddleware = fromScalarMiddleware, isArgument = false)))) - case None ⇒ + case None => val emptyValue = if (input.isEmpty) Trinary.Undefined else Trinary.Null - Right(Some(VariableValue((_, _, _) ⇒ Right(emptyValue)))) + Right(Some(VariableValue((_, _, _) => Right(emptyValue)))) } } else - Right(Some(VariableValue((marshaller, firstKindMarshaller, actualType) ⇒ + Right(Some(VariableValue((marshaller, firstKindMarshaller, 
actualType) => coerceInputValue(actualType, fieldPath, input.get, Some(definition), None, marshaller, firstKindMarshaller, fromScalarMiddleware = fromScalarMiddleware, isArgument = false)))) - } else Left(violations.map(violation ⇒ + } else Left(violations.map(violation => VarTypeMismatchViolation(definition.name, QueryRenderer.render(definition.tpe), input map um.render, violation: Violation, sourceMapper, definition.location.toList))) } } object ValueCoercionHelper { - private val defaultValueMapFn = (x: Any) ⇒ x + private val defaultValueMapFn = (x: Any) => x lazy val default = new ValueCoercionHelper[Unit] } sealed trait Trinary[+T] { def toOption: Option[T] = this match { - case Trinary.Null | Trinary.Undefined | Trinary.NullWithDefault(_) ⇒ None - case Trinary.Defined(v) ⇒ Some(v) + case Trinary.Null | Trinary.Undefined | Trinary.NullWithDefault(_) => None + case Trinary.Defined(v) => Some(v) } - def map[R](fn: T ⇒ R): Trinary[R] + def map[R](fn: T => R): Trinary[R] } object Trinary { case object Null extends Trinary[Nothing] { - def map[R](fn: Nothing ⇒ R) = this + def map[R](fn: Nothing => R) = this } case object Undefined extends Trinary[Nothing] { - def map[R](fn: Nothing ⇒ R) = this + def map[R](fn: Nothing => R) = this } case class Defined[T](value: T) extends Trinary[T] { - def map[R](fn: T ⇒ R) = Defined(fn(value)) + def map[R](fn: T => R) = Defined(fn(value)) } case class NullWithDefault[T](defaultValue: T) extends Trinary[T] { - def map[R](fn: T ⇒ R) = NullWithDefault(fn(defaultValue)) + def map[R](fn: T => R) = NullWithDefault(fn(defaultValue)) } } diff --git a/src/main/scala/sangria/execution/ValueCollector.scala b/src/main/scala/sangria/execution/ValueCollector.scala index 6588c867..becc4c7c 100644 --- a/src/main/scala/sangria/execution/ValueCollector.scala +++ b/src/main/scala/sangria/execution/ValueCollector.scala @@ -11,39 +11,39 @@ import sangria.validation._ import scala.collection.immutable.VectorBuilder import scala.util.{Failure, Success, 
Try} -class ValueCollector[Ctx, Input](schema: Schema[_, _], inputVars: Input, sourceMapper: Option[SourceMapper], deprecationTracker: DeprecationTracker, userContext: Ctx, exceptionHandler: ExceptionHandler, fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]], ignoreErrors: Boolean)(implicit um: InputUnmarshaller[Input]) { +class ValueCollector[Ctx, Input](schema: Schema[_, _], inputVars: Input, sourceMapper: Option[SourceMapper], deprecationTracker: DeprecationTracker, userContext: Ctx, exceptionHandler: ExceptionHandler, fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]], ignoreErrors: Boolean)(implicit um: InputUnmarshaller[Input]) { val coercionHelper = new ValueCoercionHelper[Ctx](sourceMapper, deprecationTracker, Some(userContext)) private val argumentCache = Cache.empty[(ExecutionPath.PathCacheKey, Vector[ast.Argument]), Try[Args]] - def getVariableValues(definitions: Vector[ast.VariableDefinition], fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]]): Try[Map[String, VariableValue]] = + def getVariableValues(definitions: Vector[ast.VariableDefinition], fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]]): Try[Map[String, VariableValue]] = if (!um.isMapNode(inputVars)) Failure(new ExecutionError(s"Variables should be a map-like object, like JSON object. 
Got: ${um.render(inputVars)}", exceptionHandler)) else { val res = definitions.foldLeft(Vector.empty[(String, Either[Vector[Violation], VariableValue])]) { - case (acc, varDef) ⇒ + case (acc, varDef) => val value = schema.getInputType(varDef.tpe) .map(coercionHelper.getVariableValue(varDef, _, um.getRootMapValue(inputVars, varDef.name), fromScalarMiddleware)) .getOrElse(Left(Vector(UnknownVariableTypeViolation(varDef.name, QueryRenderer.render(varDef.tpe), sourceMapper, varDef.location.toList)))) value match { - case Right(Some(v)) ⇒ acc :+ (varDef.name → Right(v)) - case Right(None) ⇒ acc - case Left(violations) ⇒ acc :+ (varDef.name → Left(violations)) + case Right(Some(v)) => acc :+ (varDef.name -> Right(v)) + case Right(None) => acc + case Left(violations) => acc :+ (varDef.name -> Left(violations)) } } val (errors, values) = res.partition(_._2.isLeft) - if (errors.nonEmpty) Failure(VariableCoercionError(errors.collect{case (name, Left(errors)) ⇒ errors}.flatten, exceptionHandler)) - else Success(Map(values.collect {case (name, Right(v)) ⇒ name → v}: _*)) + if (errors.nonEmpty) Failure(VariableCoercionError(errors.collect{case (name, Left(errors)) => errors}.flatten, exceptionHandler)) + else Success(Map(values.collect {case (name, Right(v)) => name -> v}: _*)) } def getFieldArgumentValues(path: ExecutionPath, forAstNode: Option[ast.AstNode], argumentDefs: List[Argument[_]], argumentAsts: Vector[ast.Argument], variables: Map[String, VariableValue]): Try[Args] = if(argumentDefs.isEmpty) ValueCollector.emptyArgs else - argumentCache.getOrElseUpdate(path.cacheKey → argumentAsts, getArgumentValues(forAstNode, argumentDefs, argumentAsts, variables)) + argumentCache.getOrElseUpdate(path.cacheKey -> argumentAsts, getArgumentValues(forAstNode, argumentDefs, argumentAsts, variables)) def getArgumentValues( forAstNode: Option[ast.AstNode], @@ -65,7 +65,7 @@ object ValueCollector { exceptionHandler: ExceptionHandler, ignoreErrors: Boolean = false, sourceMapper: 
Option[SourceMapper] = None, - fromScalarMiddleware: Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]] = None + fromScalarMiddleware: Option[(Any, InputType[_]) => Option[Either[Violation, Any]]] = None ): Try[Args] = { import coercionHelper._ @@ -79,7 +79,7 @@ object ValueCollector { val undefinedArgs = Some(new VectorBuilder[String]) val res = argumentDefs.foldLeft(marshaller.emptyMapNode(argumentDefs.map(_.name)): marshaller.MapBuilder) { - case (acc, argDef) ⇒ + case (acc, argDef) => val argPath = argDef.name :: Nil val astValue = astArgMap get argDef.name map (_.value) val fromInput = argDef.fromInput @@ -90,7 +90,7 @@ object ValueCollector { resolveMapValue(argDef.argumentType, argPath, argDef.defaultValue, forAstNode, argDef.name, marshaller, fromInput.marshaller, errors = errors, valueMap = fromInput.fromResult, defaultValueInfo = defaultInfo, undefinedValues = undefinedArgs, isArgument = true, fromScalarMiddleware = fromScalarMiddleware)( acc, astValue map (coerceInputValue(argDef.argumentType, argPath, _, forAstNode, Some(variables), marshaller, fromInput.marshaller, fromScalarMiddleware = fromScalarMiddleware, isArgument = true))) } catch { - case InputParsingError(e) ⇒ + case InputParsingError(e) => errors ++= e.map(InvalidInputValueViolation(argDef.name, _, sourceMapper, astValue.flatMap(_.location).toList)) acc } @@ -109,10 +109,10 @@ object ValueCollector { } } -case class VariableValue(fn: (ResultMarshaller, ResultMarshaller, InputType[_]) ⇒ Either[Vector[Violation], Trinary[ResultMarshaller#Node]]) { +case class VariableValue(fn: (ResultMarshaller, ResultMarshaller, InputType[_]) => Either[Vector[Violation], Trinary[ResultMarshaller#Node]]) { private val cache = Cache.empty[(Int, Int), Either[Vector[Violation], Trinary[ResultMarshaller#Node]]] def resolve(marshaller: ResultMarshaller, firstKindMarshaller: ResultMarshaller, actualType: InputType[_]): Either[Vector[Violation], Trinary[firstKindMarshaller.Node]] = - 
cache.getOrElseUpdate(System.identityHashCode(firstKindMarshaller) → System.identityHashCode(actualType.namedType), + cache.getOrElseUpdate(System.identityHashCode(firstKindMarshaller) -> System.identityHashCode(actualType.namedType), fn(marshaller, firstKindMarshaller, actualType)).asInstanceOf[Either[Vector[Violation], Trinary[firstKindMarshaller.Node]]] } diff --git a/src/main/scala/sangria/execution/batch/BatchExecutor.scala b/src/main/scala/sangria/execution/batch/BatchExecutor.scala index 0fdca9a7..0dfe59af 100644 --- a/src/main/scala/sangria/execution/batch/BatchExecutor.scala +++ b/src/main/scala/sangria/execution/batch/BatchExecutor.scala @@ -37,10 +37,10 @@ object BatchExecutor { description = Some("Make the field value available for other operations via variable."), arguments = AsArg :: Nil, locations = Set(DirectiveLocation.Field), - shouldInclude = _ ⇒ true) + shouldInclude = _ => true) - val OperationNameExtension: Middleware[Any] = Middleware.simpleExtension[Any](ctx ⇒ - ast.ObjectValue("batch" → ast.ObjectValue("operationName" → + val OperationNameExtension: Middleware[Any] = Middleware.simpleExtension[Any](ctx => + ast.ObjectValue("batch" -> ast.ObjectValue("operationName" -> ctx.operationName.fold(ast.NullValue(): ast.Value)(ast.StringValue(_))))) /** __EXPERIMENTAL__ */ @@ -63,8 +63,8 @@ object BatchExecutor { val executor = Executor(schema, QueryValidator.empty, deferredResolver, exceptionHandler, deprecationTracker, middleware, maxQueryDepth, queryReducers) val validations = validateOperationNames(queryAst, operationNames, exceptionHandler) - .flatMap(_ ⇒ calcExecutionPlan(schema, queryAst, operationNames, inferVariableDefinitions, exceptionHandler)) - .flatMap { case res @ (updatedDocument, _) ⇒ + .flatMap(_ => calcExecutionPlan(schema, queryAst, operationNames, inferVariableDefinitions, exceptionHandler)) + .flatMap { case res @ (updatedDocument, _) => val violations = queryValidator.validateQuery(schema, updatedDocument) if 
(violations.nonEmpty) Failure(ValidationError(violations, exceptionHandler)) @@ -76,23 +76,23 @@ object BatchExecutor { val convertedVariables = convertVariables(variables, marshaller) validations match { - case Failure(e) ⇒ scheme.failed(e) - case Success((updatedDocument, executionPlan)) ⇒ + case Failure(e) => scheme.failed(e) + case Success((updatedDocument, executionPlan)) => scheme match { - case ss: ExecutionScheme.StreamBasedExecutionScheme[_] ⇒ + case ss: ExecutionScheme.StreamBasedExecutionScheme[_] => val childScheme = if (scheme.extended) ExecutionScheme.Extended else ExecutionScheme.Default val futures = - doExecuteBatchPlan(executionPlan, marshaller, variables, convertedVariables, childScheme.extended, single = false) { (opName, vars, iu) ⇒ + doExecuteBatchPlan(executionPlan, marshaller, variables, convertedVariables, childScheme.extended, single = false) { (opName, vars, iu) => implicit val iiu = iu executeIndividual(executor, updatedDocument, opName, userContext, root, vars, childScheme).asInstanceOf[Future[AnyRef]] } ss.subscriptionStream.merge(futures.map(ss.subscriptionStream.singleFuture)).asInstanceOf[scheme.Result[Ctx, T]] - case es ⇒ + case es => val futures = - doExecuteBatchPlan(executionPlan, marshaller, variables, convertedVariables, es.extended, single = true) { (opName, vars, iu) ⇒ + doExecuteBatchPlan(executionPlan, marshaller, variables, convertedVariables, es.extended, single = true) { (opName, vars, iu) => implicit val iiu = iu executeIndividual(executor, updatedDocument, opName, userContext, root, variables, es).asInstanceOf[Future[AnyRef]] @@ -112,7 +112,7 @@ object BatchExecutor { if (iu.isMapNode(variables)) { val keys = iu.getMapKeys(variables) - keys.flatMap(k ⇒ iu.getRootMapValue(variables, k).map(v ⇒ k → v.convertMarshaled[M])).toMap + keys.flatMap(k => iu.getRootMapValue(variables, k).map(v => k -> v.convertMarshaled[M])).toMap } else Map.empty } @@ -125,19 +125,19 @@ object BatchExecutor { extendedScheme: Boolean, single: 
Boolean )( - executeFn: (String, Any, InputUnmarshaller[Any]) ⇒ Future[T] + executeFn: (String, Any, InputUnmarshaller[Any]) => Future[T] )(implicit executionContext: ExecutionContext, inputUnmarshaller: InputUnmarshaller[In]): Vector[Future[T]] = { val inProgress = new mutable.HashMap[String, Future[T]] def loop(opName: String, deps: Vector[(String, Set[String])]): Future[T] = { inProgress.getOrElseUpdate(opName, { if (deps.nonEmpty) { - val depFutures = deps.map {case d @ (dep, _) ⇒ loop(dep, plan.dependencies(dep)).map(d → _)} + val depFutures = deps.map {case d @ (dep, _) => loop(dep, plan.dependencies(dep)).map(d -> _)} - Future.sequence(depFutures).flatMap { resolved ⇒ + Future.sequence(depFutures).flatMap { resolved => collectVariables(opName, plan, resolved, marshaller, convertedVariables, extendedScheme) match { - case Some(newVars) ⇒ executeFn(opName, newVars, marshaller.inputUnmarshaller.asInstanceOf[InputUnmarshaller[Any]]) - case None ⇒ executeFn(opName, origVariables, inputUnmarshaller.asInstanceOf[InputUnmarshaller[Any]]) + case Some(newVars) => executeFn(opName, newVars, marshaller.inputUnmarshaller.asInstanceOf[InputUnmarshaller[Any]]) + case None => executeFn(opName, origVariables, inputUnmarshaller.asInstanceOf[InputUnmarshaller[Any]]) } } @@ -151,7 +151,7 @@ object BatchExecutor { } breakable { - plan.dependencies.foreach { case (opName, deps) ⇒ + plan.dependencies.foreach { case (opName, deps) => loop(opName, deps) } } @@ -164,9 +164,9 @@ object BatchExecutor { val iu = marshaller.inputUnmarshaller val m = marshaller.marshaller - depValues.foreach { depVal ⇒ + depValues.foreach { depVal => val ((operationName, neededExports), executionResult) = depVal - val exports = plan.exportOperations(operationName).exports.filter(e ⇒ neededExports contains e.exportedName) + val exports = plan.exportOperations(operationName).exports.filter(e => neededExports contains e.exportedName) val result = if (extendedScheme) 
executionResult.asInstanceOf[ExecutionResult[Any, M]].result else executionResult.asInstanceOf[M] def visitPath(name: String, result: M, path: Vector[String]): Unit = { @@ -181,26 +181,26 @@ object BatchExecutor { val key = path.head val childPath = path.tail - iu.getMapValue(result, key).foreach { value ⇒ + iu.getMapValue(result, key).foreach { value => if (iu.isListNode(value)) iu.getListValue(value).foreach(visitPath(name, _, childPath)) else visitPath(name, value, childPath) } } } - exports.foreach { export ⇒ + exports.foreach { export => visitPath(export.exportedName, result, "data" +: export.path) } } // merge in original variables with overlapping keys - origVariables.foreach { case (key, value) ⇒ + origVariables.foreach { case (key, value) => collectedValues.get(key) match { - case Some(list) if iu.isListNode(value) ⇒ + case Some(list) if iu.isListNode(value) => list ++= iu.getListValue(value) - case Some(list) ⇒ + case Some(list) => list += value - case _ ⇒ // do nothing + case _ => // do nothing } } @@ -210,7 +210,7 @@ object BatchExecutor { val collectedKeys = collectedValues.keySet val builder = m.emptyMapNode(origVariables.keys.filterNot(collectedKeys.contains).toSeq ++ collectedKeys) - collectedValues.foreach { case (key, values) ⇒ + collectedValues.foreach { case (key, values) => if (isListVariable(opName, plan, key)) m.addMapNodeElem(builder, key, m.arrayNode(values.toVector.asInstanceOf[Vector[m.Node]]), false) else if (values.nonEmpty) @@ -218,7 +218,7 @@ object BatchExecutor { } // add original vars - origVariables.foreach { case (key, value) ⇒ + origVariables.foreach { case (key, value) => if (!collectedValues.contains(key)) m.addMapNodeElem(builder, key, value.asInstanceOf[m.Node], false) } @@ -231,24 +231,24 @@ object BatchExecutor { val op = plan.exportOperations(opName) op.variableDefs.find(_.name == variableName) match { - case Some(definition) ⇒ isInputList(definition.tpe) - case None ⇒ op.variableUsages.find(_.node.name == variableName) 
match { - case Some(usage) if usage.tpe.isDefined ⇒ isInputList(usage.tpe.get) - case _ ⇒ true + case Some(definition) => isInputList(definition.tpe) + case None => op.variableUsages.find(_.node.name == variableName) match { + case Some(usage) if usage.tpe.isDefined => isInputList(usage.tpe.get) + case _ => true } } } private def isInputList(tpe: Type): Boolean = tpe match { - case _: ListInputType[_] ⇒ true - case OptionInputType(ofType) ⇒ isInputList(ofType) - case _ ⇒ false + case _: ListInputType[_] => true + case OptionInputType(ofType) => isInputList(ofType) + case _ => false } private def isInputList(tpe: ast.Type): Boolean = tpe match { - case _: ast.ListType ⇒ true - case ast.NotNullType(ofType, _) ⇒ isInputList(ofType) - case _ ⇒ false + case _: ast.ListType => true + case ast.NotNullType(ofType, _) => isInputList(ofType) + case _ => false } private def executeIndividual[Ctx, Root, Input]( @@ -269,9 +269,9 @@ object BatchExecutor { if (operationNames.isEmpty) Failure(OperationSelectionError(s"List of operations to execute in batch is empty.", exceptionHandler)) else - operationNames.find(op ⇒ !document.operations.contains(Some(op))) match { - case Some(opName) ⇒ Failure(OperationSelectionError(s"Unknown operation name '$opName'.", exceptionHandler)) - case None ⇒ Success(()) + operationNames.find(op => !document.operations.contains(Some(op))) match { + case Some(opName) => Failure(OperationSelectionError(s"Unknown operation name '$opName'.", exceptionHandler)) + case None => Success(()) } private def calcExecutionPlan(schema: Schema[_, _], queryAst: ast.Document, operationNames: Seq[String], allowedToInferVariableDefinitions: Boolean, exceptionHandler: ExceptionHandler): Try[(ast.Document, BatchExecutionPlan)] = { @@ -279,26 +279,26 @@ object BatchExecutor { val collectResult = exportedAll.foldLeft(Success(exportedAll): Try[mutable.HashMap[String, ExportOperation]]) { - case (s @ Success(ops), (opName, op)) ⇒ + case (s @ Success(ops), (opName, op)) => 
collectFragmentInfo(op, exportFragments, exceptionHandler) match { - case Success(o) ⇒ + case Success(o) => ops(opName) = o s - case Failure(e) ⇒ Failure(e) + case Failure(e) => Failure(e) } - case (f @ Failure(_), _) ⇒ f + case (f @ Failure(_), _) => f } val exportedRelevant = exportedAll.filterKeys(operationNames.contains).toMap collectResult - .flatMap { _ ⇒ + .flatMap { _ => if (allowedToInferVariableDefinitions) inferVariableDefinitions(exportedAll, queryAst, exceptionHandler) else { val violations = - exportedAll.values.flatMap { op ⇒ + exportedAll.values.flatMap { op => findUndefinedVariableUsages(op).map(UndefinedVariableDefinitionViolation(op.operationName, _, queryAst.sourceMapper, op.variableUsages.flatMap(_.node.location).toList)) } @@ -308,14 +308,14 @@ object BatchExecutor { Success(queryAst) } } - .flatMap { updatedQueryAst ⇒ + .flatMap { updatedQueryAst => val exportedVars = findExportedVariableNames(exportedRelevant) val dependencies = findOperationDependencies(exportedRelevant, exportedVars) validateCircularOperationDependencies(updatedQueryAst, dependencies, exceptionHandler) } - .map { case (updatedQueryAst, dependencies) ⇒ - updatedQueryAst → BatchExecutionPlan(exportedRelevant, dependencies) + .map { case (updatedQueryAst, dependencies) => + updatedQueryAst -> BatchExecutionPlan(exportedRelevant, dependencies) } } @@ -326,28 +326,28 @@ object BatchExecutor { if (path.exists(_._1 == src)) violations += CircularOperationDependencyViolation(src, path.map(_._2), queryAst.sourceMapper, queryAst.operations(Some(src)).location.toList) else { - deps.foreach { d ⇒ + deps.foreach { d => loop(d._1, dependencies(d._1), path :+ ((src, s"$src(${d._2.map("$" + _).mkString(", ")})"))) } } - dependencies.foreach { case (op, deps) ⇒ + dependencies.foreach { case (op, deps) => loop(op, deps, Vector.empty) } if (violations.nonEmpty) Failure(BatchExecutionViolationError(violations.toVector, exceptionHandler)) else - Success(queryAst → dependencies) + 
Success(queryAst -> dependencies) } private def findOperationDependencies(exportOperations: Map[String, ExportOperation], exportedVars: Set[String]): Map[String, Vector[(String, Set[String])]] = - exportOperations.values.map { src ⇒ + exportOperations.values.map { src => val requires = (src.variableDefs.map(_.name).filter(exportedVars.contains) ++ src.variableUsages.map(_.node.name).filter(exportedVars.contains)).toSet - val providers = exportOperations.values.map(dst ⇒ dst.operationName → dst.exports.map(_.exportedName).toSet.intersect(requires)).filter(_._2.nonEmpty) + val providers = exportOperations.values.map(dst => dst.operationName -> dst.exports.map(_.exportedName).toSet.intersect(requires)).filter(_._2.nonEmpty) - src.operationName → providers.toVector + src.operationName -> providers.toVector }.toMap private def findExportedVariableNames(exportOperations: Map[String, ExportOperation]): Set[String] = @@ -358,7 +358,7 @@ object BatchExecutor { val updatedDocument = AstVisitor.visit(queryAst, AstVisitor { - case od: ast.OperationDefinition if od.name.isDefined && exportOperations.contains(od.name.get) ⇒ + case od: ast.OperationDefinition if od.name.isDefined && exportOperations.contains(od.name.get) => val exportOperation = exportOperations(od.name.get) val undefined = findUndefinedVariableUsages(exportOperation) @@ -366,7 +366,7 @@ object BatchExecutor { val newVariableDefs = new mutable.ListBuffer[ast.VariableDefinition] - undefined.foreach { ud ⇒ + undefined.foreach { ud => val allUsages = usagesByName(ud) val first = allUsages.head val firstType = first.tpe.getOrElse(throw new IllegalStateException("Variable usage type is not detected, but expected at this point!")) @@ -375,7 +375,7 @@ object BatchExecutor { val violations = new mutable.ListBuffer[Violation] - tail.foreach { curr ⇒ + tail.foreach { curr => val currType = curr.tpe.getOrElse(throw new IllegalStateException("Variable usage type is not detected, but expected at this point!")) val currAstType 
= SchemaRenderer.renderTypeNameAst(currType) @@ -423,19 +423,19 @@ object BatchExecutor { def loop(spreads: Set[SpreadInfo], seenFragmentNames: Set[String], path: Vector[String]): Unit = { - spreads.foreach { s ⇒ + spreads.foreach { s => if (seenFragmentNames.contains(s.fragmentName)) recursive += s.fragmentName else exportFragments.get(s.fragmentName) match { - case Some(f) ⇒ + case Some(f) => val spreadPath = path ++ s.path - currentExports ++= f.exports.map(e ⇒ e.copy(path = spreadPath ++ e.path)) + currentExports ++= f.exports.map(e => e.copy(path = spreadPath ++ e.path)) currentVariables ++= f.variableUsages loop(f.fragmentSpreads, seenFragmentNames + s.fragmentName, spreadPath) - case None ⇒ + case None => unknown += s.fragmentName } } @@ -467,42 +467,42 @@ object BatchExecutor { val exportOperations = new mutable.HashMap[String, ExportOperation] val exportFragments = new mutable.HashMap[String, ExportFragment] - AstVisitor.visitAstWithTypeInfo(schema, queryAst)(typeInfo ⇒ AstVisitor( + AstVisitor.visitAstWithTypeInfo(schema, queryAst)(typeInfo => AstVisitor( onEnter = { - case op: ast.OperationDefinition if op.name.isDefined ⇒ + case op: ast.OperationDefinition if op.name.isDefined => currentOperation = op.name VisitorCommand.Continue - case fd: ast.FragmentDefinition ⇒ + case fd: ast.FragmentDefinition => currentFragment = Some(fd.name) VisitorCommand.Continue - case fs: ast.FragmentSpread if currentOperation.isDefined || currentFragment.isDefined ⇒ + case fs: ast.FragmentSpread if currentOperation.isDefined || currentFragment.isDefined => currentSpreads += SpreadInfo(fs.name, calcPath(typeInfo)) VisitorCommand.Continue - case vd: ast.VariableDefinition if currentOperation.isDefined || currentFragment.isDefined ⇒ + case vd: ast.VariableDefinition if currentOperation.isDefined || currentFragment.isDefined => currentVariableDefs += vd VisitorCommand.Continue - case vv: ast.VariableValue if currentOperation.isDefined || currentFragment.isDefined ⇒ + case vv: 
ast.VariableValue if currentOperation.isDefined || currentFragment.isDefined => currentVariables += VariableUsage(vv, typeInfo.inputType, typeInfo.defaultValue) VisitorCommand.Continue - case field: ast.Field if currentOperation.isDefined || currentFragment.isDefined ⇒ + case field: ast.Field if currentOperation.isDefined || currentFragment.isDefined => field.directives.find(_.name == ExportDirective.name) match { - case Some(d) ⇒ d.arguments.find(_.name == AsArg.name) match { - case Some(ast.Argument(_, ast.StringValue(as, _, _, _, _), _, _)) if typeInfo.fieldDef.isDefined ⇒ + case Some(d) => d.arguments.find(_.name == AsArg.name) match { + case Some(ast.Argument(_, ast.StringValue(as, _, _, _, _), _, _)) if typeInfo.fieldDef.isDefined => currentDirectives += Export(as, calcPath(typeInfo), d, typeInfo.fieldDef.get.fieldType) VisitorCommand.Continue - case _ ⇒ VisitorCommand.Continue + case _ => VisitorCommand.Continue } - case None ⇒ VisitorCommand.Continue + case None => VisitorCommand.Continue } }, onLeave = { - case od: ast.OperationDefinition if od.name.isDefined ⇒ + case od: ast.OperationDefinition if od.name.isDefined => val name = od.name.get currentOperation = None @@ -520,7 +520,7 @@ object BatchExecutor { VisitorCommand.Continue - case fd: ast.FragmentDefinition ⇒ + case fd: ast.FragmentDefinition => currentFragment = None exportFragments(fd.name) = ExportFragment(fd.name, @@ -537,9 +537,9 @@ object BatchExecutor { } )) - exportOperations → exportFragments + exportOperations -> exportFragments } private def calcPath(typeInfo: TypeInfo) = - typeInfo.ancestors.collect{case f: ast.Field ⇒ f.outputName}.toVector -} \ No newline at end of file + typeInfo.ancestors.collect{case f: ast.Field => f.outputName}.toVector +} diff --git a/src/main/scala/sangria/execution/deferred/DeferredResolver.scala b/src/main/scala/sangria/execution/deferred/DeferredResolver.scala index 7823b32f..2ea3bbd0 100644 --- 
a/src/main/scala/sangria/execution/deferred/DeferredResolver.scala +++ b/src/main/scala/sangria/execution/deferred/DeferredResolver.scala @@ -7,7 +7,7 @@ import sangria.schema.{Args, Field} import scala.concurrent.{ExecutionContext, Future} trait DeferredResolver[-Ctx] { - def includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) ⇒ Boolean] = None + def includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) => Boolean] = None def groupDeferred[T <: DeferredWithInfo](deferred: Vector[T]): Vector[Vector[T]] = Vector(deferred) @@ -20,7 +20,7 @@ trait DeferredResolver[-Ctx] { object DeferredResolver { val empty = new DeferredResolver[Any] { override def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = - deferred map (d ⇒ Future.failed(UnsupportedDeferError(d))) + deferred map (d => Future.failed(UnsupportedDeferError(d))) } def fetchers[Ctx](fetchers: Fetcher[Ctx, _, _, _]*): DeferredResolver[Ctx] = diff --git a/src/main/scala/sangria/execution/deferred/Fetcher.scala b/src/main/scala/sangria/execution/deferred/Fetcher.scala index ebb54515..158abed2 100644 --- a/src/main/scala/sangria/execution/deferred/Fetcher.scala +++ b/src/main/scala/sangria/execution/deferred/Fetcher.scala @@ -1,12 +1,12 @@ package sangria.execution.deferred -import scala.collection.mutable.{Set ⇒ MutableSet, Map ⇒ MutableMap} +import scala.collection.mutable.{Set => MutableSet, Map => MutableMap} import scala.concurrent.Future class Fetcher[Ctx, Res, RelRes, Id]( - val idFn: Res ⇒ Id, - val fetch: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]], - val fetchRel: (FetcherContext[Ctx], RelationIds[Res]) ⇒ Future[Seq[RelRes]], + val idFn: Res => Id, + val fetch: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]], + val fetchRel: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[RelRes]], val config: FetcherConfig ) { def defer(id: Id) = FetcherDeferredOne(this, id) @@ -33,26 +33,26 @@ class 
Fetcher[Ctx, Res, RelRes, Id]( def clearCachedRelId[RelId](deferredResolverState: Any, rel: Relation[Res, _, RelId], relId: RelId) = findCache(deferredResolverState)(_.clearRelId(rel, relId)) - private def findCache(deferredResolverState: Any)(op: FetcherCache ⇒ Unit): Unit = + private def findCache(deferredResolverState: Any)(op: FetcherCache => Unit): Unit = deferredResolverState match { - case map: Map[AnyRef, FetcherCache] @unchecked ⇒ map.get(this) match { - case Some(cache) ⇒ op(cache) - case None ⇒ // just ignore + case map: Map[AnyRef, FetcherCache] @unchecked => map.get(this) match { + case Some(cache) => op(cache) + case None => // just ignore } - case _ ⇒ // just ignore + case _ => // just ignore } def ids(deferred: Vector[Deferred[Any]]): Vector[Id] = { val allIds = MutableSet[Id]() deferred foreach { - case FetcherDeferredOne(s, id) if s eq this ⇒ allIds += id.asInstanceOf[Id] - case FetcherDeferredOpt(s, id) if s eq this ⇒ allIds += id.asInstanceOf[Id] - case FetcherDeferredOptOpt(s, Some(id)) if s eq this ⇒ allIds += id.asInstanceOf[Id] - case FetcherDeferredSeq(s, ids) if s eq this ⇒ allIds ++= ids.asInstanceOf[Seq[Id]] - case FetcherDeferredSeqOpt(s, ids) if s eq this ⇒ allIds ++= ids.asInstanceOf[Seq[Id]] - case FetcherDeferredSeqOptExplicit(s, ids) if s eq this ⇒ allIds ++= ids.asInstanceOf[Seq[Id]] - case _ ⇒ // skip + case FetcherDeferredOne(s, id) if s eq this => allIds += id.asInstanceOf[Id] + case FetcherDeferredOpt(s, id) if s eq this => allIds += id.asInstanceOf[Id] + case FetcherDeferredOptOpt(s, Some(id)) if s eq this => allIds += id.asInstanceOf[Id] + case FetcherDeferredSeq(s, ids) if s eq this => allIds ++= ids.asInstanceOf[Seq[Id]] + case FetcherDeferredSeqOpt(s, ids) if s eq this => allIds ++= ids.asInstanceOf[Seq[Id]] + case FetcherDeferredSeqOptExplicit(s, ids) if s eq this => allIds ++= ids.asInstanceOf[Seq[Id]] + case _ => // skip } allIds.toVector @@ -63,80 +63,80 @@ class Fetcher[Ctx, Res, RelRes, Id]( def addToSet(rel: 
Relation[Any, Any, Any], id: Any) = allIds.get(rel) match { - case Some(set) ⇒ set += id - case None ⇒ + case Some(set) => set += id + case None => val set = MutableSet[Any]() set += id allIds(rel) = set } deferred foreach { - case FetcherDeferredRel(s, rel, relId) if s eq this ⇒ addToSet(rel, relId) - case FetcherDeferredRelOpt(s, rel, relId) if s eq this ⇒ addToSet(rel, relId) - case FetcherDeferredRelSeq(s, rel, relId) if s eq this ⇒ addToSet(rel, relId) - case FetcherDeferredRelSeqMany(s, rel, relIds) if s eq this ⇒ relIds.foreach(addToSet(rel, _)) - case _ ⇒ // skip + case FetcherDeferredRel(s, rel, relId) if s eq this => addToSet(rel, relId) + case FetcherDeferredRelOpt(s, rel, relId) if s eq this => addToSet(rel, relId) + case FetcherDeferredRelSeq(s, rel, relId) if s eq this => addToSet(rel, relId) + case FetcherDeferredRelSeqMany(s, rel, relIds) if s eq this => relIds.foreach(addToSet(rel, _)) + case _ => // skip } - allIds.map{case (k, v) ⇒ k → v.toVector}.toMap + allIds.map{case (k, v) => k -> v.toVector}.toMap } def isRel(deferred: Deferred[Any]) = deferred match { case FetcherDeferredRel(_, _, _) | FetcherDeferredRelOpt(_, _, _) | FetcherDeferredRelSeq(_, _, _) | - FetcherDeferredRelSeqMany(_, _, _) ⇒ true - case _ ⇒ false + FetcherDeferredRelSeqMany(_, _, _) => true + case _ => false } } object Fetcher { - private def relationUnsupported[Ctx, Id, Res]: (FetcherContext[Ctx], RelationIds[Res]) ⇒ Future[Seq[Res]] = - (_, _) ⇒ Future.failed(new RelationNotSupportedError) + private def relationUnsupported[Ctx, Id, Res]: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[Res]] = + (_, _) => Future.failed(new RelationNotSupportedError) - private def relationOnlySupported[Ctx, Id, Res]: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]] = - (_, _) ⇒ Future.failed(new RelationOnlySupportedError) + private def relationOnlySupported[Ctx, Id, Res]: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]] = + (_, _) => Future.failed(new RelationOnlySupportedError) - 
def apply[Ctx, Res, Id](fetch: (Ctx, Seq[Id]) ⇒ Future[Seq[Res]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = - new Fetcher[Ctx, Res, Res, Id](i ⇒ id.id(i), (c, ids) ⇒ fetch(c.ctx, ids), relationUnsupported, config) + def apply[Ctx, Res, Id](fetch: (Ctx, Seq[Id]) => Future[Seq[Res]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = + new Fetcher[Ctx, Res, Res, Id](i => id.id(i), (c, ids) => fetch(c.ctx, ids), relationUnsupported, config) - def withContext[Ctx, Res, Id](fetch: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = - new Fetcher[Ctx, Res, Res, Id](i ⇒ id.id(i), fetch, relationUnsupported, config) + def withContext[Ctx, Res, Id](fetch: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = + new Fetcher[Ctx, Res, Res, Id](i => id.id(i), fetch, relationUnsupported, config) - def rel[Ctx, Res, RelRes, Id](fetch: (Ctx, Seq[Id]) ⇒ Future[Seq[Res]], fetchRel: (Ctx, RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), (c, ids) ⇒ fetch(c.ctx, ids), (c, ids) ⇒ fetchRel(c.ctx, ids), config) + def rel[Ctx, Res, RelRes, Id](fetch: (Ctx, Seq[Id]) => Future[Seq[Res]], fetchRel: (Ctx, RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), (c, ids) => fetch(c.ctx, ids), (c, ids) => fetchRel(c.ctx, ids), config) - def relWithContext[Ctx, Res, RelRes, Id](fetch: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]], fetchRel: (FetcherContext[Ctx], RelationIds[Res]) ⇒ 
Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), fetch, fetchRel, config) + def relWithContext[Ctx, Res, RelRes, Id](fetch: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]], fetchRel: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), fetch, fetchRel, config) - def relOnly[Ctx, Res, RelRes, Id](fetchRel: (Ctx, RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), relationOnlySupported, (c, ids) ⇒ fetchRel(c.ctx, ids), config) + def relOnly[Ctx, Res, RelRes, Id](fetchRel: (Ctx, RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), relationOnlySupported, (c, ids) => fetchRel(c.ctx, ids), config) - def relOnlyWithContext[Ctx, Res, RelRes, Id](fetchRel: (FetcherContext[Ctx], RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), relationOnlySupported, fetchRel, config) + def relOnlyWithContext[Ctx, Res, RelRes, Id](fetchRel: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.empty)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), relationOnlySupported, fetchRel, config) - def caching[Ctx, Res, Id](fetch: (Ctx, Seq[Id]) ⇒ Future[Seq[Res]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, 
Res, Id] = - new Fetcher[Ctx, Res, Res, Id](i ⇒ id.id(i), (c, ids) ⇒ fetch(c.ctx, ids), relationUnsupported, config) + def caching[Ctx, Res, Id](fetch: (Ctx, Seq[Id]) => Future[Seq[Res]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = + new Fetcher[Ctx, Res, Res, Id](i => id.id(i), (c, ids) => fetch(c.ctx, ids), relationUnsupported, config) - def cachingWithContext[Ctx, Res, Id](fetch: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = - new Fetcher[Ctx, Res, Res, Id](i ⇒ id.id(i), fetch, relationUnsupported, config) + def cachingWithContext[Ctx, Res, Id](fetch: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, Res, Id] = + new Fetcher[Ctx, Res, Res, Id](i => id.id(i), fetch, relationUnsupported, config) - def relCaching[Ctx, Res, RelRes, Id](fetch: (Ctx, Seq[Id]) ⇒ Future[Seq[Res]], fetchRel: (Ctx, RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), (c, ids) ⇒ fetch(c.ctx, ids), (c, ids) ⇒ fetchRel(c.ctx, ids), config) + def relCaching[Ctx, Res, RelRes, Id](fetch: (Ctx, Seq[Id]) => Future[Seq[Res]], fetchRel: (Ctx, RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), (c, ids) => fetch(c.ctx, ids), (c, ids) => fetchRel(c.ctx, ids), config) - def relCachingWithContext[Ctx, Res, RelRes, Id](fetch: (FetcherContext[Ctx], Seq[Id]) ⇒ Future[Seq[Res]], fetchRel: (FetcherContext[Ctx], RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, 
RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), fetch, fetchRel, config) + def relCachingWithContext[Ctx, Res, RelRes, Id](fetch: (FetcherContext[Ctx], Seq[Id]) => Future[Seq[Res]], fetchRel: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), fetch, fetchRel, config) - def relOnlyCaching[Ctx, Res, RelRes, Id](fetchRel: (Ctx, RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), relationOnlySupported, (c, ids) ⇒ fetchRel(c.ctx, ids), config) + def relOnlyCaching[Ctx, Res, RelRes, Id](fetchRel: (Ctx, RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), relationOnlySupported, (c, ids) => fetchRel(c.ctx, ids), config) - def relOnlyCachingWithContext[Ctx, Res, RelRes, Id](fetchRel: (FetcherContext[Ctx], RelationIds[Res]) ⇒ Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = - new Fetcher[Ctx, Res, RelRes, Id](i ⇒ id.id(i), relationOnlySupported, fetchRel, config) + def relOnlyCachingWithContext[Ctx, Res, RelRes, Id](fetchRel: (FetcherContext[Ctx], RelationIds[Res]) => Future[Seq[RelRes]], config: FetcherConfig = FetcherConfig.caching)(implicit id: HasId[Res, Id]): Fetcher[Ctx, Res, RelRes, Id] = + new Fetcher[Ctx, Res, RelRes, Id](i => id.id(i), relationOnlySupported, fetchRel, config) } -case class FetcherConfig(cacheConfig: Option[() ⇒ FetcherCache] = None, maxBatchSizeConfig: Option[Int] = None) { - def caching = copy(cacheConfig = Some(() ⇒ FetcherCache.simple)) - def caching(cache: FetcherCache) = copy(cacheConfig 
= Some(() ⇒ cache)) +case class FetcherConfig(cacheConfig: Option[() => FetcherCache] = None, maxBatchSizeConfig: Option[Int] = None) { + def caching = copy(cacheConfig = Some(() => FetcherCache.simple)) + def caching(cache: FetcherCache) = copy(cacheConfig = Some(() => cache)) def maxBatchSize(size: Int) = copy(maxBatchSizeConfig = Some(size)) } @@ -208,19 +208,19 @@ trait Relation[T, Tmp, RelId] { } object Relation { - def apply[T, RelId](name: String, idFn: T ⇒ Seq[RelId]): Relation[T, T, RelId] = + def apply[T, RelId](name: String, idFn: T => Seq[RelId]): Relation[T, T, RelId] = SimpleRelation[T, T, RelId](name)(idFn, identity) - def apply[T, Tmp, RelId](name: String, idFn: Tmp ⇒ Seq[RelId], mapFn: Tmp ⇒ T): Relation[T, Tmp, RelId] = + def apply[T, Tmp, RelId](name: String, idFn: Tmp => Seq[RelId], mapFn: Tmp => T): Relation[T, Tmp, RelId] = SimpleRelation[T, Tmp, RelId](name)(idFn, mapFn) } -abstract class AbstractRelation[T, Tmp, RelId](idFn: Tmp ⇒ Seq[RelId], mapFn: Tmp ⇒ T) extends Relation[T, Tmp, RelId] { +abstract class AbstractRelation[T, Tmp, RelId](idFn: Tmp => Seq[RelId], mapFn: Tmp => T) extends Relation[T, Tmp, RelId] { def relIds(value: Tmp) = idFn(value) def map(value: Tmp) = mapFn(value) } -case class SimpleRelation[T, Tmp, RelId](name: String)(idFn: Tmp ⇒ Seq[RelId], mapFn: Tmp ⇒ T) extends AbstractRelation[T, Tmp, RelId](idFn, mapFn) +case class SimpleRelation[T, Tmp, RelId](name: String)(idFn: Tmp => Seq[RelId], mapFn: Tmp => T) extends AbstractRelation[T, Tmp, RelId](idFn, mapFn) case class RelationIds[Res](rawIds: Map[Relation[Res, _, _], Seq[_]]) { def apply[RelId](relation: Relation[Res, _, RelId]): Seq[RelId] = @@ -243,4 +243,4 @@ case class FetcherContext[Ctx]( class RelationNotSupportedError extends Exception(s"Relations are not supported by Fetcher.") -class RelationOnlySupportedError extends Exception(s"Only relations are supported by Fetcher.") \ No newline at end of file +class RelationOnlySupportedError extends Exception(s"Only 
relations are supported by Fetcher.") diff --git a/src/main/scala/sangria/execution/deferred/FetcherBasedDeferredResolver.scala b/src/main/scala/sangria/execution/deferred/FetcherBasedDeferredResolver.scala index d7cb5907..a2989db7 100644 --- a/src/main/scala/sangria/execution/deferred/FetcherBasedDeferredResolver.scala +++ b/src/main/scala/sangria/execution/deferred/FetcherBasedDeferredResolver.scala @@ -6,44 +6,44 @@ import scala.annotation.unchecked.uncheckedVariance import scala.collection.immutable.VectorBuilder import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} -import scala.collection.mutable.{Map ⇒ MutableMap, Set ⇒ MutableSet} +import scala.collection.mutable.{Map => MutableMap, Set => MutableSet} class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]], fallback: Option[DeferredResolver[Ctx]]) extends DeferredResolver[Ctx] { private val fetchersMap: Map[AnyRef, Fetcher[Ctx, _, _, _]] @uncheckedVariance = - fetchers.map(f ⇒ f → f).toMap + fetchers.map(f => f -> f).toMap override def groupDeferred[T <: DeferredWithInfo](deferred: Vector[T]) = fallback match { - case Some(f) ⇒ f.groupDeferred(deferred) - case None ⇒ super.groupDeferred(deferred) + case Some(f) => f.groupDeferred(deferred) + case None => super.groupDeferred(deferred) } override val includeDeferredFromField = fallback.flatMap(_.includeDeferredFromField) orElse super.includeDeferredFromField - override def initialQueryState = fetchers.flatMap(f ⇒ f.config.cacheConfig.map(cacheFn ⇒ (f: AnyRef) → cacheFn())).toMap + override def initialQueryState = fetchers.flatMap(f => f.config.cacheConfig.map(cacheFn => (f: AnyRef) -> cacheFn())).toMap def resolve(deferred: Vector[Deferred[Any]], ctx: Ctx, queryState: Any)(implicit ec: ExecutionContext) = { val fetcherCaches = queryState.asInstanceOf[Map[AnyRef, FetcherCache]] val grouped = deferred groupBy { - case FetcherDeferredOne(s, _) ⇒ fetchersMap.get(s) - case FetcherDeferredOpt(s, _) ⇒ 
fetchersMap.get(s) - case FetcherDeferredOptOpt(s, _) ⇒ fetchersMap.get(s) - case FetcherDeferredSeq(s, _) ⇒ fetchersMap.get(s) - case FetcherDeferredSeqOpt(s, _) ⇒ fetchersMap.get(s) - case FetcherDeferredSeqOptExplicit(s, _) ⇒ fetchersMap.get(s) - case FetcherDeferredRel(s, _, _) ⇒ fetchersMap.get(s) - case FetcherDeferredRelOpt(s, _, _) ⇒ fetchersMap.get(s) - case FetcherDeferredRelSeq(s, _, _) ⇒ fetchersMap.get(s) - case FetcherDeferredRelSeqMany(s, _, _) ⇒ fetchersMap.get(s) - case _ ⇒ None + case FetcherDeferredOne(s, _) => fetchersMap.get(s) + case FetcherDeferredOpt(s, _) => fetchersMap.get(s) + case FetcherDeferredOptOpt(s, _) => fetchersMap.get(s) + case FetcherDeferredSeq(s, _) => fetchersMap.get(s) + case FetcherDeferredSeqOpt(s, _) => fetchersMap.get(s) + case FetcherDeferredSeqOptExplicit(s, _) => fetchersMap.get(s) + case FetcherDeferredRel(s, _, _) => fetchersMap.get(s) + case FetcherDeferredRelOpt(s, _, _) => fetchersMap.get(s) + case FetcherDeferredRelSeq(s, _, _) => fetchersMap.get(s) + case FetcherDeferredRelSeqMany(s, _, _) => fetchersMap.get(s) + case _ => None } val resolved = MutableMap[Deferred[Any], Future[Any]]() grouped foreach { - case (Some(fetcher), d) ⇒ + case (Some(fetcher), d) => val fetcherCache = fetcherCaches.get(fetcher) val (relDeferred, normalDeferred) = d partition fetcher.isRel val fetcherContext = FetcherContext[Ctx](ctx, fetcher, fetcherCache, fetcherCaches, fetchers) @@ -51,21 +51,21 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] resolveRelations(fetcherContext, relDeferred, resolved) resolveEntities(fetcherContext, normalDeferred, resolved) - case (None, deferred) ⇒ + case (None, deferred) => fallback match { - case Some(f) ⇒ + case Some(f) => val res = f.resolve(deferred, ctx, queryState) - for (i ← deferred.indices) { + for (i <- deferred.indices) { resolved(deferred(i)) = res(i) } - case None ⇒ - deferred.foreach(d ⇒ resolved(d) = Future.failed(UnsupportedDeferError(d))) + case 
None => + deferred.foreach(d => resolved(d) = Future.failed(UnsupportedDeferError(d))) } } - deferred map (d ⇒ resolved(d)) + deferred map (d => resolved(d)) } private def resolveRelations( @@ -85,51 +85,51 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] else Future.successful(MutableMap.empty[Relation[Any, Any, Any], MutableMap[Any, Seq[Any]]]) - deferredToResolve foreach { deferred ⇒ - resolved(deferred) = newResults.map { m ⇒ + deferredToResolve foreach { deferred => + resolved(deferred) = newResults.map { m => val f = ctx.fetcher.asInstanceOf[Fetcher[Any, Any, Any, Any]] deferred match { - case FetcherDeferredRel(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) ⇒ + case FetcherDeferredRel(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) => cachedResults(rel)(relId).headOption match { - case Some(head) ⇒ head - case None ⇒ throw AbsentDeferredRelValueError(f, deferred, rel, relId) + case Some(head) => head + case None => throw AbsentDeferredRelValueError(f, deferred, rel, relId) } - case FetcherDeferredRel(_, rel, relId) if m.contains(rel) && m(rel).contains(relId) ⇒ + case FetcherDeferredRel(_, rel, relId) if m.contains(rel) && m(rel).contains(relId) => m(rel)(relId).headOption match { - case Some(head) ⇒ head - case None ⇒ throw AbsentDeferredRelValueError(f, deferred, rel, relId) + case Some(head) => head + case None => throw AbsentDeferredRelValueError(f, deferred, rel, relId) } - case FetcherDeferredRel(_, rel, relId) ⇒ + case FetcherDeferredRel(_, rel, relId) => throw AbsentDeferredRelValueError(f, deferred, rel, relId) - case FetcherDeferredRelOpt(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) ⇒ + case FetcherDeferredRelOpt(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) => cachedResults(rel)(relId).headOption - case FetcherDeferredRelOpt(_, rel, relId) if m.contains(rel) && 
m(rel).contains(relId) ⇒ + case FetcherDeferredRelOpt(_, rel, relId) if m.contains(rel) && m(rel).contains(relId) => m(rel)(relId).headOption - case FetcherDeferredRelOpt(_, _, _) ⇒ + case FetcherDeferredRelOpt(_, _, _) => None - case FetcherDeferredRelSeq(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) ⇒ + case FetcherDeferredRelSeq(_, rel, relId) if cachedResults.contains(rel) && cachedResults(rel).contains(relId) => cachedResults(rel)(relId) - case FetcherDeferredRelSeq(_, rel, relId) if m.contains(rel) && m(rel).contains(relId) ⇒ + case FetcherDeferredRelSeq(_, rel, relId) if m.contains(rel) && m(rel).contains(relId) => m(rel)(relId) - case FetcherDeferredRelSeq(_, _, _) ⇒ + case FetcherDeferredRelSeq(_, _, _) => Vector.empty - case FetcherDeferredRelSeqMany(_, rel, relIds) if cachedResults.contains(rel) ⇒ - removeDuplicates(f, relIds.flatMap(relId ⇒ cachedResults(rel).getOrElse(relId, Vector.empty))) + case FetcherDeferredRelSeqMany(_, rel, relIds) if cachedResults.contains(rel) => + removeDuplicates(f, relIds.flatMap(relId => cachedResults(rel).getOrElse(relId, Vector.empty))) - case FetcherDeferredRelSeqMany(_, rel, relIds) if m.contains(rel) ⇒ - removeDuplicates(f, relIds.flatMap(relId ⇒ m(rel).getOrElse(relId, Vector.empty))) + case FetcherDeferredRelSeqMany(_, rel, relIds) if m.contains(rel) => + removeDuplicates(f, relIds.flatMap(relId => m(rel).getOrElse(relId, Vector.empty))) - case FetcherDeferredRelSeqMany(_, _, _) ⇒ + case FetcherDeferredRelSeqMany(_, _, _) => Vector.empty } } @@ -139,7 +139,7 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] private def removeDuplicates(fetcher: Fetcher[Any, Any, Any, Any], values: Seq[Any]) = { val seen = MutableSet[Any]() - values.filter { v ⇒ + values.filter { v => val id = fetcher.idFn(v) if (seen contains id) false @@ -160,117 +160,117 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] val (nonCachedIds, 
cachedResults) = partitionCached(ctx.cache, ids) val groupedIds = ctx.fetcher.config.maxBatchSizeConfig match { - case Some(size) ⇒ nonCachedIds.grouped(size) - case None ⇒ Iterator(nonCachedIds) + case Some(size) => nonCachedIds.grouped(size) + case None => Iterator(nonCachedIds) } - val results = groupedIds map { group ⇒ + val results = groupedIds map { group => if (group.nonEmpty) - f.fetch(ctx, group).map(r ⇒ group → Success(r): (Vector[Any], Try[Seq[Any]])).recover {case e ⇒ group → Failure(e)} + f.fetch(ctx, group).map(r => group -> Success(r): (Vector[Any], Try[Seq[Any]])).recover {case e => group -> Failure(e)} else - Future.successful(group → Success(Seq.empty)) + Future.successful(group -> Success(Seq.empty)) } - val futureRes = Future.sequence(results).map { allResults ⇒ + val futureRes = Future.sequence(results).map { allResults => val byId = MutableMap[Any, Any]() // can contain either exception or actual value! (using `Any` to avoid unnecessary boxing) - allResults.toVector.foreach { case (group, groupResult) ⇒ + allResults.toVector.foreach { case (group, groupResult) => groupResult match { - case Success(values) ⇒ - values.foreach(v ⇒ byId(f.idFn(v)) = v) + case Success(values) => + values.foreach(v => byId(f.idFn(v)) = v) - case Failure(e) ⇒ - group.foreach(id ⇒ byId(id) = e) + case Failure(e) => + group.foreach(id => byId(id) = e) } } byId } - deferredToResolve foreach { deferred ⇒ - resolved(deferred) = futureRes.map { m ⇒ + deferredToResolve foreach { deferred => + resolved(deferred) = futureRes.map { m => val f = ctx.fetcher.asInstanceOf[Fetcher[Any, Any, Any, Any]] def updateCache[T](id: Any, v: T): T = ctx.cache match { - case Some(cache) ⇒ + case Some(cache) => cache.update(id, v) v - case None ⇒ v + case None => v } deferred match { - case FetcherDeferredOne(_, id) if cachedResults contains id ⇒ + case FetcherDeferredOne(_, id) if cachedResults contains id => cachedResults(id) - case FetcherDeferredOne(_, id) ⇒ + case FetcherDeferredOne(_, 
id) => m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case Some(v) ⇒ updateCache(id, v) - case None ⇒ throw AbsentDeferredValueError(f, deferred, id) + case Some(t: Throwable) => throw t + case Some(v) => updateCache(id, v) + case None => throw AbsentDeferredValueError(f, deferred, id) } - case FetcherDeferredOpt(_, id) if cachedResults contains id ⇒ + case FetcherDeferredOpt(_, id) if cachedResults contains id => cachedResults.get(id) - case FetcherDeferredOpt(_, id) ⇒ + case FetcherDeferredOpt(_, id) => m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case v ⇒ + case Some(t: Throwable) => throw t + case v => v foreach (updateCache(id, _)) v } - case FetcherDeferredOptOpt(_, None) ⇒ + case FetcherDeferredOptOpt(_, None) => None - case FetcherDeferredOptOpt(_, Some(id)) if cachedResults contains id ⇒ + case FetcherDeferredOptOpt(_, Some(id)) if cachedResults contains id => cachedResults.get(id) - case FetcherDeferredOptOpt(_, Some(id)) ⇒ + case FetcherDeferredOptOpt(_, Some(id)) => m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case v ⇒ + case Some(t: Throwable) => throw t + case v => v foreach (updateCache(id, _)) v } - case FetcherDeferredSeq(_, ids) ⇒ - ids map { id ⇒ + case FetcherDeferredSeq(_, ids) => + ids map { id => if (cachedResults contains id) cachedResults(id) else m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case Some(v) ⇒ updateCache(id, v) - case None ⇒ throw AbsentDeferredValueError(f, deferred, id) + case Some(t: Throwable) => throw t + case Some(v) => updateCache(id, v) + case None => throw AbsentDeferredValueError(f, deferred, id) } } - case FetcherDeferredSeqOpt(_, ids) ⇒ - ids flatMap { id ⇒ + case FetcherDeferredSeqOpt(_, ids) => + ids flatMap { id => if (cachedResults contains id) cachedResults.get(id) else m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case v ⇒ + case Some(t: Throwable) => throw t + case v => v foreach (updateCache(id, _)) v } } - case FetcherDeferredSeqOptExplicit(_, ids) ⇒ - 
ids map { id ⇒ + case FetcherDeferredSeqOptExplicit(_, ids) => + ids map { id => if (cachedResults contains id) cachedResults.get(id) else m.get(id) match { - case Some(t: Throwable) ⇒ throw t - case v ⇒ + case Some(t: Throwable) => throw t + case v => v foreach (updateCache(id, _)) v @@ -283,33 +283,33 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] private def partitionCached(cache: Option[FetcherCache], ids: Vector[Any]): (Vector[Any], MutableMap[Any, Any]) = cache match { - case Some(c) ⇒ + case Some(c) => val misses = new VectorBuilder[Any] val hits = MutableMap[Any, Any]() - ids.foreach { id ⇒ + ids.foreach { id => c.get(id) match { - case Some(v) ⇒ hits(id) = v - case None ⇒ misses += id + case Some(v) => hits(id) = v + case None => misses += id } } - misses.result() → hits + misses.result() -> hits - case None ⇒ - ids → MutableMap.empty + case None => + ids -> MutableMap.empty } private def partitionCachedRel(cache: Option[FetcherCache], ids: Map[Relation[Any, Any, Any], Vector[Any]]): (Map[Relation[Any, Any, Any], Vector[Any]], MutableMap[Relation[Any, Any, Any], MutableMap[Any, Seq[Any]]]) = cache match { - case Some(c) ⇒ + case Some(c) => val misses = MutableMap[Relation[Any, Any, Any], MutableSet[Any]]() val hits = MutableMap[Relation[Any, Any, Any], MutableMap[Any, Seq[Any]]]() def addHit(rel: Relation[Any, Any, Any], relId: Any, res: Seq[Any]) = hits.get(rel) match { - case Some(map) ⇒ map(relId) = res - case None ⇒ + case Some(map) => map(relId) = res + case None => val map = MutableMap[Any, Seq[Any]]() map(relId) = res hits(rel) = map @@ -317,50 +317,50 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] def addMiss(rel: Relation[Any, Any, Any], relId: Any) = misses.get(rel) match { - case Some(set) ⇒ set += relId - case None ⇒ + case Some(set) => set += relId + case None => val set = MutableSet[Any]() set += relId misses(rel) = set } - ids.foreach { case (rel, ids) ⇒ - ids foreach 
{ relId ⇒ + ids.foreach { case (rel, ids) => + ids foreach { relId => c.getRel(rel, relId) match { - case Some(v) ⇒ addHit(rel, relId, v) - case None ⇒ addMiss(rel, relId) + case Some(v) => addHit(rel, relId, v) + case None => addMiss(rel, relId) } } } - misses.map{case (k, v) ⇒ k → v.toVector}.toMap → hits + misses.map{case (k, v) => k -> v.toVector}.toMap -> hits - case None ⇒ - ids → MutableMap.empty + case None => + ids -> MutableMap.empty } private def groupAndCacheRelations(ctx: FetcherContext[_], relIds: Map[Relation[Any, Any, Any], Vector[Any]], result: Seq[Any]): MutableMap[Relation[Any, Any, Any], MutableMap[Any, Seq[Any]]] = { val grouped = MutableMap[Relation[Any, Any, Any], MutableMap[Any, Seq[Any]]]() def updateCache[T](rel: Relation[Any, Any, Any], relId: Any, v: Seq[T]): Seq[T] = ctx.cache match { - case Some(cache) ⇒ - cache.updateRel(rel, relId, ctx.fetcher.idFn.asInstanceOf[T ⇒ Any], v) + case Some(cache) => + cache.updateRel(rel, relId, ctx.fetcher.idFn.asInstanceOf[T => Any], v) v - case None ⇒ v + case None => v } - relIds foreach { case (rel, relIdsForRel) ⇒ + relIds foreach { case (rel, relIdsForRel) => val identified = MutableMap[Any, VectorBuilder[Any]]() - result foreach { res ⇒ + result foreach { res => val relIds = rel.relIds(res) val mappedRes = rel.map(res) - relIds foreach { relId ⇒ + relIds foreach { relId => identified.get(relId) match { - case Some(builder) ⇒ builder += mappedRes - case None ⇒ + case Some(builder) => builder += mappedRes + case None => val builder = new VectorBuilder[Any] builder += mappedRes identified(relId) = builder @@ -368,14 +368,14 @@ class FetcherBasedDeferredResolver[-Ctx](fetchers: Vector[Fetcher[Ctx, _, _, _]] } } - relIdsForRel foreach { relId ⇒ + relIdsForRel foreach { relId => val res = identified.get(relId).fold(Vector.empty[Any])(_.result()) updateCache(rel, relId, res) grouped.get(rel) match { - case Some(map) ⇒ map(relId) = res - case None ⇒ + case Some(map) => map(relId) = res + case None => val 
map = MutableMap[Any, Seq[Any]]() map(relId) = res grouped(rel) = map diff --git a/src/main/scala/sangria/execution/deferred/FetcherCache.scala b/src/main/scala/sangria/execution/deferred/FetcherCache.scala index caf483a0..fe5afeb9 100644 --- a/src/main/scala/sangria/execution/deferred/FetcherCache.scala +++ b/src/main/scala/sangria/execution/deferred/FetcherCache.scala @@ -13,7 +13,7 @@ trait FetcherCache { def getRel(rel: Any, relId: Any): Option[Seq[Any]] def update(id: Any, value: Any): Unit - def updateRel[T](rel: Any, relId: Any, idFn: T ⇒ Any, values: Seq[T]): Unit + def updateRel[T](rel: Any, relId: Any, idFn: T => Any, values: Seq[T]): Unit def clear(): Unit def clearId(id: Any): Unit @@ -30,7 +30,7 @@ class SimpleFetcherCache extends FetcherCache { private val relCache = Cache.empty[Any, Seq[Any]] def cacheKey(id: Any) = id - def cacheKeyRel(rel: Any, relId: Any) = rel → relId + def cacheKeyRel(rel: Any, relId: Any) = rel -> relId def cacheable(id: Any) = true def cacheableRel(rel: Any, relId: Any) = true @@ -43,9 +43,9 @@ class SimpleFetcherCache extends FetcherCache { cache.update(cacheKey(id), value) } - def updateRel[T](rel: Any, relId: Any, idFn: T ⇒ Any, values: Seq[T]) = { + def updateRel[T](rel: Any, relId: Any, idFn: T => Any, values: Seq[T]) = { if (cacheableRel(rel, relId)) { - values.foreach { v ⇒ + values.foreach { v => update(idFn(v), v) } @@ -63,10 +63,10 @@ class SimpleFetcherCache extends FetcherCache { override def clearRel(rel: Any) = relCache.removeKeys { - case key @ (r, _) if r == rel ⇒ true - case _ ⇒ false + case key @ (r, _) if r == rel => true + case _ => false } override def clearRelId(rel: Any, relId: Any) = relCache.remove(cacheKeyRel(rel, relId)) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/execution/deferred/HasId.scala b/src/main/scala/sangria/execution/deferred/HasId.scala index 9e544f77..5512e8da 100644 --- a/src/main/scala/sangria/execution/deferred/HasId.scala +++ 
b/src/main/scala/sangria/execution/deferred/HasId.scala @@ -8,9 +8,9 @@ trait HasId[T, Id] { } object HasId { - private class SimpleHasId[T, Id](fn: T ⇒ Id) extends HasId[T, Id] { + private class SimpleHasId[T, Id](fn: T => Id) extends HasId[T, Id] { def id(value: T) = fn(value) } - def apply[T, Id](fn: T ⇒ Id): HasId[T, Id] = new SimpleHasId[T, Id](fn) + def apply[T, Id](fn: T => Id): HasId[T, Id] = new SimpleHasId[T, Id](fn) } diff --git a/src/main/scala/sangria/execution/middleware.scala b/src/main/scala/sangria/execution/middleware.scala index ceed8169..05d87e20 100644 --- a/src/main/scala/sangria/execution/middleware.scala +++ b/src/main/scala/sangria/execution/middleware.scala @@ -16,28 +16,28 @@ trait Middleware[-Ctx] { } object Middleware { - def composeFromScalarMiddleware[Ctx](middleware: List[Middleware[Ctx]], userContext: Ctx): Option[(Any, InputType[_]) ⇒ Option[Either[Violation, Any]]] = { + def composeFromScalarMiddleware[Ctx](middleware: List[Middleware[Ctx]], userContext: Ctx): Option[(Any, InputType[_]) => Option[Either[Violation, Any]]] = { val relevant = middleware.collect { - case m: MiddlewareFromScalar[Ctx] ⇒ m + case m: MiddlewareFromScalar[Ctx] => m } if (relevant.nonEmpty) - Some((v, tpe) ⇒ { + Some((v, tpe) => { var changed = false var violation: Violation = null val newValue = relevant.foldLeft(v) { - case (acc, _) if violation != null ⇒ acc - case (acc, m) ⇒ m.fromScalar(acc, tpe, userContext) match { - case Some(Left(viol)) ⇒ + case (acc, _) if violation != null => acc + case (acc, m) => m.fromScalar(acc, tpe, userContext) match { + case Some(Left(viol)) => violation = viol acc - case Some(Right(newAcc)) ⇒ + case Some(Right(newAcc)) => changed = true newAcc - case None ⇒ + case None => acc } } @@ -49,23 +49,23 @@ object Middleware { else None } - def composeToScalarMiddleware[Ctx](middleware: List[Middleware[Ctx]], userContext: Ctx): Option[(Any, InputType[_]) ⇒ Option[Any]] = { + def composeToScalarMiddleware[Ctx](middleware: 
List[Middleware[Ctx]], userContext: Ctx): Option[(Any, InputType[_]) => Option[Any]] = { val relevant = middleware.collect { - case m: MiddlewareToScalar[Ctx] ⇒ m + case m: MiddlewareToScalar[Ctx] => m } if (relevant.nonEmpty) - Some((v, tpe) ⇒ { + Some((v, tpe) => { var changed = false val newValue = relevant.foldRight(v) { - case (m, acc) ⇒ m.toScalar(acc, tpe, userContext) match { - case Some(newAcc) ⇒ + case (m, acc) => m.toScalar(acc, tpe, userContext) match { + case Some(newAcc) => changed = true newAcc - case None ⇒ acc + case None => acc } } @@ -75,7 +75,7 @@ object Middleware { else None } - def simpleExtension[Ctx](extensionFn: MiddlewareQueryContext[Ctx, _, _] ⇒ ast.Value): Middleware[Ctx] = + def simpleExtension[Ctx](extensionFn: MiddlewareQueryContext[Ctx, _, _] => ast.Value): Middleware[Ctx] = new SimpleAstBasedExtensionMiddleware[Ctx](extensionFn) } diff --git a/src/main/scala/sangria/introspection/IntrospectionParser.scala b/src/main/scala/sangria/introspection/IntrospectionParser.scala index 3f22f4cb..fa71fd45 100644 --- a/src/main/scala/sangria/introspection/IntrospectionParser.scala +++ b/src/main/scala/sangria/introspection/IntrospectionParser.scala @@ -11,7 +11,7 @@ object IntrospectionParser { scheme.success(parseSchema(mapField(mapField(introspectionResult, "data"), "__schema", Vector("data")), Vector("data", "__schema"))) } catch { // exception mechanism is used intentionally in order to minimise the footprint of parsing - case e: IllegalAccessException ⇒ scheme.failure(e) + case e: IllegalAccessException => scheme.failure(e) } private def parseInputValue[In : InputUnmarshaller](value: In, path: Vector[String]) = @@ -25,7 +25,7 @@ object IntrospectionParser { IntrospectionField( name = mapStringField(field, "name", path), description = mapStringFieldOpt(field, "description"), - args = mapFieldOpt(field, "args") map um.getListValue getOrElse Vector.empty map (arg ⇒ parseInputValue(arg, path :+ "args")), + args = mapFieldOpt(field, "args") map 
um.getListValue getOrElse Vector.empty map (arg => parseInputValue(arg, path :+ "args")), tpe = parseTypeRef(mapField(field, "type", path), path :+ "type"), isDeprecated = mapBooleanField(field, "isDeprecated", path), deprecationReason = mapStringFieldOpt(field, "deprecationReason")) @@ -34,30 +34,30 @@ object IntrospectionParser { IntrospectionObjectType( name = mapStringField(tpe, "name", path), description = mapStringFieldOpt(tpe, "description", path), - fields = mapFieldOpt(tpe, "fields") map um.getListValue getOrElse Vector.empty map (field ⇒ parseField(field, path :+ "fields")), - interfaces = mapFieldOpt(tpe, "interfaces") map um.getListValue getOrElse Vector.empty map (i ⇒ parseNamedTypeRef(i, path :+ "interfaces")) + fields = mapFieldOpt(tpe, "fields") map um.getListValue getOrElse Vector.empty map (field => parseField(field, path :+ "fields")), + interfaces = mapFieldOpt(tpe, "interfaces") map um.getListValue getOrElse Vector.empty map (i => parseNamedTypeRef(i, path :+ "interfaces")) ) private def parseInterface[In : InputUnmarshaller](tpe: In, path: Vector[String]) = IntrospectionInterfaceType( name = mapStringField(tpe, "name", path), description = mapStringFieldOpt(tpe, "description", path), - fields = mapFieldOpt(tpe, "fields") map um.getListValue getOrElse Vector.empty map (field ⇒ parseField(field, path :+ "fields")), - possibleTypes = mapFieldOpt(tpe, "possibleTypes") map um.getListValue getOrElse Vector.empty map (i ⇒ parseNamedTypeRef(i, path :+ "possibleTypes")) + fields = mapFieldOpt(tpe, "fields") map um.getListValue getOrElse Vector.empty map (field => parseField(field, path :+ "fields")), + possibleTypes = mapFieldOpt(tpe, "possibleTypes") map um.getListValue getOrElse Vector.empty map (i => parseNamedTypeRef(i, path :+ "possibleTypes")) ) private def parseUnion[In : InputUnmarshaller](tpe: In, path: Vector[String]) = IntrospectionUnionType( name = mapStringField(tpe, "name", path), description = mapStringFieldOpt(tpe, "description", path), 
- possibleTypes = mapFieldOpt(tpe, "possibleTypes") map um.getListValue getOrElse Vector.empty map (i ⇒ parseNamedTypeRef(i, path :+ "possibleTypes")) + possibleTypes = mapFieldOpt(tpe, "possibleTypes") map um.getListValue getOrElse Vector.empty map (i => parseNamedTypeRef(i, path :+ "possibleTypes")) ) private def parseInputObject[In : InputUnmarshaller](tpe: In, path: Vector[String]) = IntrospectionInputObjectType( name = mapStringField(tpe, "name", path), description = mapStringFieldOpt(tpe, "description", path), - inputFields = mapFieldOpt(tpe, "inputFields") map um.getListValue getOrElse Vector.empty map (arg ⇒ parseInputValue(arg, path :+ "inputFields"))) + inputFields = mapFieldOpt(tpe, "inputFields") map um.getListValue getOrElse Vector.empty map (arg => parseInputValue(arg, path :+ "inputFields"))) private def parseScalar[In : InputUnmarshaller](tpe: In, path: Vector[String]) = IntrospectionScalarType( @@ -81,18 +81,18 @@ object IntrospectionParser { IntrospectionDirective( name = mapStringField(directive, "name", path), description = mapStringFieldOpt(directive, "description"), - locations = um.getListValue(mapField(directive, "locations")).map(v ⇒ DirectiveLocation.fromString(stringValue(v, path :+ "locations"))).toSet, - args = mapFieldOpt(directive, "args") map um.getListValue getOrElse Vector.empty map (arg ⇒ parseInputValue(arg, path :+ "args"))) + locations = um.getListValue(mapField(directive, "locations")).map(v => DirectiveLocation.fromString(stringValue(v, path :+ "locations"))).toSet, + args = mapFieldOpt(directive, "args") map um.getListValue getOrElse Vector.empty map (arg => parseInputValue(arg, path :+ "args"))) private def parseType[In : InputUnmarshaller](tpe: In, path: Vector[String]) = mapStringField(tpe, "kind", path) match { - case "OBJECT" ⇒ parseObject(tpe, path) - case "UNION" ⇒ parseUnion(tpe, path) - case "INTERFACE" ⇒ parseInterface(tpe, path) - case "INPUT_OBJECT" ⇒ parseInputObject(tpe, path) - case "SCALAR" ⇒ parseScalar(tpe, 
path) - case "ENUM" ⇒ parseEnum(tpe, path) - case kind ⇒ error(s"Unsupported kind: $kind") + case "OBJECT" => parseObject(tpe, path) + case "UNION" => parseUnion(tpe, path) + case "INTERFACE" => parseInterface(tpe, path) + case "INPUT_OBJECT" => parseInputObject(tpe, path) + case "SCALAR" => parseScalar(tpe, path) + case "ENUM" => parseEnum(tpe, path) + case kind => error(s"Unsupported kind: $kind") } private def parseSchema[In : InputUnmarshaller](schema: In, path: Vector[String]) = @@ -101,7 +101,7 @@ object IntrospectionParser { mutationType = mapFieldOpt(schema, "mutationType") map (parseNamedTypeRef(_, path :+ "mutationType")), subscriptionType = mapFieldOpt(schema, "subscriptionType") map (parseNamedTypeRef(_, path :+ "subscriptionType")), types = um.getListValue(mapField(schema, "types", path)) map (parseType(_, path :+ "types")), - directives = mapFieldOpt(schema, "directives") map um.getListValue getOrElse Vector.empty map (i ⇒ parseDirective(i, path :+ "directives")), + directives = mapFieldOpt(schema, "directives") map um.getListValue getOrElse Vector.empty map (i => parseDirective(i, path :+ "directives")), description = mapStringFieldOpt(schema, "description", path)) private def parseNamedTypeRef[In : InputUnmarshaller](in: In, path: Vector[String]) = @@ -109,34 +109,34 @@ object IntrospectionParser { private def parseTypeRef[In : InputUnmarshaller](in: In, path: Vector[String]): IntrospectionTypeRef = mapStringField(in, "kind", path) match { - case "LIST" ⇒ IntrospectionListTypeRef(parseTypeRef(mapField(in, "ofType", path), path :+ "ofType")) - case "NON_NULL" ⇒ IntrospectionNonNullTypeRef(parseTypeRef(mapField(in, "ofType", path), path :+ "ofType")) - case _ ⇒ parseNamedTypeRef(in, path) + case "LIST" => IntrospectionListTypeRef(parseTypeRef(mapField(in, "ofType", path), path :+ "ofType")) + case "NON_NULL" => IntrospectionNonNullTypeRef(parseTypeRef(mapField(in, "ofType", path), path :+ "ofType")) + case _ => parseNamedTypeRef(in, path) } private 
def required[T](obj: Option[T], path: Vector[String]) = obj match { - case Some(o) ⇒ o - case None ⇒ error(s"Required property is missing at path: ${path mkString "."}") + case Some(o) => o + case None => error(s"Required property is missing at path: ${path mkString "."}") } private def checkErrors[In : InputUnmarshaller](introspectionResult: In): Unit = um.getRootMapValue(introspectionResult, "errors") match { - case Some(errors) ⇒ + case Some(errors) => throw new IllegalArgumentException( s"Can't parse introspection results because it contains errors: ${um.render(errors)}") - case None ⇒ // everything is fine + case None => // everything is fine } private def stringValue[In : InputUnmarshaller](value: In, path: Vector[String]) = um.getScalaScalarValue(value) match { - case s: String ⇒ s - case _ ⇒ error(s"Expected String but got '${um.render(value)}' at path ${path mkString "."}") + case s: String => s + case _ => error(s"Expected String but got '${um.render(value)}' at path ${path mkString "."}") } private def booleanValue[In : InputUnmarshaller](value: In, path: Vector[String]) = um.getScalaScalarValue(value) match { - case b: Boolean ⇒ b - case _ ⇒ error(s"Expected Boolean but got '${um.render(value)}' at path ${path mkString "."}") + case b: Boolean => b + case _ => error(s"Expected Boolean but got '${um.render(value)}' at path ${path mkString "."}") } private def mapField[In : InputUnmarshaller](map: In, name: String, path: Vector[String] = Vector.empty): In = @@ -152,7 +152,7 @@ object IntrospectionParser { um.getMapValue(map, name) filter um.isDefined private def mapStringFieldOpt[In : InputUnmarshaller](map: In, name: String, path: Vector[String] = Vector.empty): Option[String] = - mapFieldOpt(map, name) filter um.isDefined map (s ⇒ stringValue(s, path :+ name) ) + mapFieldOpt(map, name) filter um.isDefined map (s => stringValue(s, path :+ name) ) private def um[T: InputUnmarshaller] = implicitly[InputUnmarshaller[T]] diff --git 
a/src/main/scala/sangria/introspection/package.scala b/src/main/scala/sangria/introspection/package.scala index 32e710a5..210757e6 100644 --- a/src/main/scala/sangria/introspection/package.scala +++ b/src/main/scala/sangria/introspection/package.scala @@ -11,14 +11,14 @@ package object introspection { val Scalar, Object, Interface, Union, Enum, InputObject, List, NonNull = Value def fromString(kind: String): TypeKind.Value = kind match { - case "SCALAR" ⇒ Scalar - case "OBJECT" ⇒ Object - case "INTERFACE" ⇒ Interface - case "UNION" ⇒ Union - case "ENUM" ⇒ Enum - case "INPUT_OBJECT" ⇒ InputObject - case "LIST" ⇒ List - case "NON_NULL" ⇒ NonNull + case "SCALAR" => Scalar + case "OBJECT" => Object + case "INTERFACE" => Interface + case "UNION" => Union + case "ENUM" => Enum + case "INPUT_OBJECT" => InputObject + case "LIST" => List + case "NON_NULL" => NonNull } } @@ -96,11 +96,11 @@ package object introspection { description = "Object and Interface types are described by a list of Fields, each of " + "which has a name, potentially a list of arguments, and a return type.", - fieldsFn = () ⇒ List[Field[Unit, Field[_, _]]]( + fieldsFn = () => List[Field[Unit, Field[_, _]]]( Field("name", StringType, resolve = _.value.name), Field("description", OptionType(StringType), resolve = _.value.description), Field("args", ListType(__InputValue), resolve = _.value.arguments), - Field("type", __Type, resolve = false → _.value.fieldType), + Field("type", __Type, resolve = false -> _.value.fieldType), Field("isDeprecated", BooleanType, resolve = _.value.deprecationReason.isDefined), Field("deprecationReason", OptionType(StringType), resolve = _.value.deprecationReason)) ) @@ -110,17 +110,17 @@ package object introspection { private def getKind(value: (Boolean, Type)) = { def identifyKind(t: Type, optional: Boolean): TypeKind.Value = t match { - case OptionType(ofType) ⇒ identifyKind(ofType, true) - case OptionInputType(ofType) ⇒ identifyKind(ofType, true) - case _ if !optional ⇒ 
TypeKind.NonNull - case _: ScalarType[_] ⇒ TypeKind.Scalar - case _: ScalarAlias[_, _] ⇒ TypeKind.Scalar - case _: ObjectType[_, _] ⇒ TypeKind.Object - case _: InterfaceType[_, _] ⇒ TypeKind.Interface - case _: UnionType[_] ⇒ TypeKind.Union - case _: EnumType[_] ⇒ TypeKind.Enum - case _: InputObjectType[_] ⇒ TypeKind.InputObject - case _: ListType[_] | _: ListInputType[_] ⇒ TypeKind.List + case OptionType(ofType) => identifyKind(ofType, true) + case OptionInputType(ofType) => identifyKind(ofType, true) + case _ if !optional => TypeKind.NonNull + case _: ScalarType[_] => TypeKind.Scalar + case _: ScalarAlias[_, _] => TypeKind.Scalar + case _: ObjectType[_, _] => TypeKind.Object + case _: InterfaceType[_, _] => TypeKind.Interface + case _: UnionType[_] => TypeKind.Union + case _: EnumType[_] => TypeKind.Enum + case _: InputObjectType[_] => TypeKind.InputObject + case _: ListType[_] | _: ListInputType[_] => TypeKind.List } val (fromTypeList, tpe) = value @@ -129,20 +129,20 @@ package object introspection { } private def findNamed(tpe: Type): Option[Type with Named] = tpe match { - case o: OptionType[_] ⇒ findNamed(o.ofType) - case o: OptionInputType[_] ⇒ findNamed(o.ofType) - case l: ListType[_] ⇒ findNamed(l.ofType) - case l: ListInputType[_] ⇒ findNamed(l.ofType) - case n: Type with Named ⇒ Some(n) - case _ ⇒ None + case o: OptionType[_] => findNamed(o.ofType) + case o: OptionInputType[_] => findNamed(o.ofType) + case l: ListType[_] => findNamed(l.ofType) + case l: ListInputType[_] => findNamed(l.ofType) + case n: Type with Named => Some(n) + case _ => None } private def findListType(tpe: Type): Option[Type] = tpe match { - case o: OptionType[_] ⇒ findListType(o.ofType) - case o: OptionInputType[_] ⇒ findListType(o.ofType) - case l: ListType[_] ⇒ Some(l.ofType) - case l: ListInputType[_] ⇒ Some(l.ofType) - case _ ⇒ None + case o: OptionType[_] => findListType(o.ofType) + case o: OptionInputType[_] => findListType(o.ofType) + case l: ListType[_] => Some(l.ofType) + 
case l: ListInputType[_] => Some(l.ofType) + case _ => None } val __Type: ObjectType[Unit, (Boolean, Type)] = ObjectType( @@ -156,60 +156,60 @@ package object introspection { "Object and Interface types provide the fields they describe. Abstract " + "types, Union and Interface, provide the Object types possible " + "at runtime. List and NonNull types compose other types.", - fieldsFn = () ⇒ List[Field[Unit, (Boolean, Type)]]( - Field("kind", __TypeKind, resolve = ctx ⇒ getKind(ctx.value)), - Field("name", OptionType(StringType), resolve = ctx ⇒ getKind(ctx.value) match { - case TypeKind.NonNull | TypeKind.List ⇒ None - case _ ⇒ findNamed(ctx.value._2) map (_.name) + fieldsFn = () => List[Field[Unit, (Boolean, Type)]]( + Field("kind", __TypeKind, resolve = ctx => getKind(ctx.value)), + Field("name", OptionType(StringType), resolve = ctx => getKind(ctx.value) match { + case TypeKind.NonNull | TypeKind.List => None + case _ => findNamed(ctx.value._2) map (_.name) }), - Field("description", OptionType(StringType), resolve = ctx ⇒ getKind(ctx.value) match { - case TypeKind.NonNull | TypeKind.List ⇒ None - case _ ⇒ findNamed(ctx.value._2) flatMap (_.description) + Field("description", OptionType(StringType), resolve = ctx => getKind(ctx.value) match { + case TypeKind.NonNull | TypeKind.List => None + case _ => findNamed(ctx.value._2) flatMap (_.description) }), Field("fields", OptionType(ListType(__Field)), arguments = includeDeprecated :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val incDep = ctx.arg(includeDeprecated) val (_, tpe) = ctx.value tpe match { - case t: ObjectLikeType[_, _] if incDep ⇒ Some(t.uniqueFields.asInstanceOf[Vector[Field[_, _]]]) - case t: ObjectLikeType[_, _] ⇒ Some(t.uniqueFields.asInstanceOf[Vector[Field[_, _]]].filter(_.deprecationReason.isEmpty)) - case _ ⇒ None + case t: ObjectLikeType[_, _] if incDep => Some(t.uniqueFields.asInstanceOf[Vector[Field[_, _]]]) + case t: ObjectLikeType[_, _] => 
Some(t.uniqueFields.asInstanceOf[Vector[Field[_, _]]].filter(_.deprecationReason.isEmpty)) + case _ => None } }), Field("interfaces", OptionType(ListType(__Type)), resolve = _.value._2 match { - case t: ObjectType[_, _] ⇒ Some(t.allInterfaces.asInstanceOf[Vector[Type]] map (true → _)) - case _ ⇒ None + case t: ObjectType[_, _] => Some(t.allInterfaces.asInstanceOf[Vector[Type]] map (true -> _)) + case _ => None }), - Field("possibleTypes", OptionType(ListType(__Type)), resolve = ctx ⇒ ctx.value._2 match { - case t: AbstractType ⇒ ctx.schema.possibleTypes.get(t.name) map { tpe ⇒ + Field("possibleTypes", OptionType(ListType(__Type)), resolve = ctx => ctx.value._2 match { + case t: AbstractType => ctx.schema.possibleTypes.get(t.name) map { tpe => t match { - case _: UnionType[_] ⇒ tpe map (true → _) - case _ ⇒ tpe sortBy (_.name) map (true → _) + case _: UnionType[_] => tpe map (true -> _) + case _ => tpe sortBy (_.name) map (true -> _) } } - case _ ⇒ None + case _ => None }), Field("enumValues", OptionType(ListType(__EnumValue)), arguments = includeDeprecated :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val incDep = ctx.arg(includeDeprecated) ctx.value._2 match { - case enum: EnumType[_] if incDep ⇒ Some(enum.values) - case enum: EnumType[_] ⇒ Some(enum.values.filter(_.deprecationReason.isEmpty)) - case _ ⇒ None + case enum: EnumType[_] if incDep => Some(enum.values) + case enum: EnumType[_] => Some(enum.values.filter(_.deprecationReason.isEmpty)) + case _ => None } }), Field("inputFields", OptionType(ListType(__InputValue)), resolve = _.value._2 match { - case io: InputObjectType[_] ⇒ Some(io.fields) - case _ ⇒ None + case io: InputObjectType[_] => Some(io.fields) + case _ => None }), - Field("ofType", OptionType(__Type), resolve = ctx ⇒ getKind(ctx.value) match { - case TypeKind.NonNull ⇒ Some(true → ctx.value._2) - case TypeKind.List ⇒ findListType(ctx.value._2) map (false → _) - case _ ⇒ None + Field("ofType", OptionType(__Type), resolve = ctx => 
getKind(ctx.value) match { + case TypeKind.NonNull => Some(true -> ctx.value._2) + case TypeKind.List => findListType(ctx.value._2) map (false -> _) + case _ => None })) ) @@ -222,10 +222,10 @@ package object introspection { fields = List[Field[Unit, InputValue[_]]]( Field("name", StringType, resolve = _.value.name), Field("description", OptionType(StringType), resolve = _.value.description), - Field("type", __Type, resolve = false → _.value.inputValueType), + Field("type", __Type, resolve = false -> _.value.inputValueType), Field("defaultValue", OptionType(StringType), description = Some("A GraphQL-formatted string representing the default value for this input value."), - resolve = ctx ⇒ ctx.value.defaultValue.flatMap(ctx.renderInputValueCompact(_, ctx.value.inputValueType))) + resolve = ctx => ctx.value.defaultValue.flatMap(ctx.renderInputValueCompact(_, ctx.value.inputValueType))) )) val __EnumValue: ObjectType[Unit, EnumValue[_]] = ObjectType( @@ -265,15 +265,15 @@ package object introspection { fields = List[Field[Unit, Schema[Any, Any]]]( Field("description", OptionType(StringType), resolve = _.value.description), Field("types", ListType(__Type), Some("A list of all types supported by this server."), - resolve = _.value.typeList map (true → _)), + resolve = _.value.typeList map (true -> _)), Field("queryType", __Type, Some("The type that query operations will be rooted at."), - resolve = true → _.value.query), + resolve = true -> _.value.query), Field("mutationType", OptionType(__Type), Some("If this server supports mutation, the type that mutation operations will be rooted at."), - resolve = _.value.mutation map (true → _)), + resolve = _.value.mutation map (true -> _)), Field("subscriptionType", OptionType(__Type), Some("If this server support subscription, the type that subscription operations will be rooted at."), - resolve = _.value.subscription map (true → _)), + resolve = _.value.subscription map (true -> _)), Field("directives", ListType(__Directive), 
Some("A list of all directives supported by this server."), resolve = _.value.directives))) @@ -288,13 +288,13 @@ package object introspection { fieldType = OptionType(__Type), description = Some("Request the type information of a single type."), arguments = Argument("name", StringType) :: Nil, - resolve = ctx ⇒ ctx.schema.types get ctx.arg[String]("name") map (true → _._2)) + resolve = ctx => ctx.schema.types get ctx.arg[String]("name") map (true -> _._2)) val TypeNameMetaField: Field[Unit, Unit] = Field( name = "__typename", fieldType = StringType, description = Some("The name of the current Object type at runtime."), - resolve = ctx ⇒ ctx.parentType.name) + resolve = ctx => ctx.parentType.name) val MetaFieldNames = Set(SchemaMetaField.name, TypeMetaField.name, TypeNameMetaField.name) diff --git a/src/main/scala/sangria/macros/AstLiftable.scala b/src/main/scala/sangria/macros/AstLiftable.scala index 3f4e038e..65db5f43 100644 --- a/src/main/scala/sangria/macros/AstLiftable.scala +++ b/src/main/scala/sangria/macros/AstLiftable.scala @@ -14,146 +14,146 @@ trait AstLiftable { import universe._ - implicit def liftSeq[T : Liftable]: Liftable[Seq[T]] = Liftable { seq ⇒ + implicit def liftSeq[T : Liftable]: Liftable[Seq[T]] = Liftable { seq => q"_root_.scala.collection.immutable.Vector(..$seq)" } implicit def liftPosition: Liftable[sangria.ast.AstLocation] = Liftable { - case sangria.ast.AstLocation(id, i, l, c) ⇒ + case sangria.ast.AstLocation(id, i, l, c) => q"_root_.sangria.ast.AstLocation($id, $i, $l, $c)" } implicit def liftOperationType: Liftable[OperationType] = Liftable { - case OperationType.Query ⇒ q"_root_.sangria.ast.OperationType.Query" - case OperationType.Mutation ⇒ q"_root_.sangria.ast.OperationType.Mutation" - case OperationType.Subscription ⇒ q"_root_.sangria.ast.OperationType.Subscription" + case OperationType.Query => q"_root_.sangria.ast.OperationType.Query" + case OperationType.Mutation => q"_root_.sangria.ast.OperationType.Mutation" + case 
OperationType.Subscription => q"_root_.sangria.ast.OperationType.Subscription" } implicit def liftType[T <: sangria.ast.Type]: Liftable[T] = Liftable { - case NamedType(n, p) ⇒ q"_root_.sangria.ast.NamedType($n, $p)" - case NotNullType(o, p) ⇒ q"_root_.sangria.ast.NotNullType($o, $p)" - case ListType(o, p) ⇒ q"_root_.sangria.ast.ListType($o, $p)" + case NamedType(n, p) => q"_root_.sangria.ast.NamedType($n, $p)" + case NotNullType(o, p) => q"_root_.sangria.ast.NotNullType($o, $p)" + case ListType(o, p) => q"_root_.sangria.ast.ListType($o, $p)" } implicit def liftComment: Liftable[Comment] = Liftable { - case Comment(l, p) ⇒ + case Comment(l, p) => q"_root_.sangria.ast.Comment($l, $p)" } implicit def liftVarDef: Liftable[VariableDefinition] = Liftable { - case VariableDefinition(n, t, d, dirs, c, p) ⇒ + case VariableDefinition(n, t, d, dirs, c, p) => q"_root_.sangria.ast.VariableDefinition($n, $t, $d, $dirs, $c, $p)" } implicit def liftInpValDef: Liftable[InputValueDefinition] = Liftable { - case InputValueDefinition(n, v, de, di, desc, c, p) ⇒ + case InputValueDefinition(n, v, de, di, desc, c, p) => q"_root_.sangria.ast.InputValueDefinition($n, $v, $de, $di, $desc, $c, $p)" } implicit def liftInpOpTpeDef: Liftable[OperationTypeDefinition] = Liftable { - case OperationTypeDefinition(o, t, c, p) ⇒ + case OperationTypeDefinition(o, t, c, p) => q"_root_.sangria.ast.OperationTypeDefinition($o, $t, $c, $p)" } implicit def liftEnumValDef: Liftable[EnumValueDefinition] = Liftable { - case EnumValueDefinition(n, d, desc, c, p) ⇒ + case EnumValueDefinition(n, d, desc, c, p) => q"_root_.sangria.ast.EnumValueDefinition($n, $d, $desc, $c, $p)" } implicit def liftFieldDef: Liftable[FieldDefinition] = Liftable { - case FieldDefinition(n, f, a, d, desc, c, p) ⇒ + case FieldDefinition(n, f, a, d, desc, c, p) => q"_root_.sangria.ast.FieldDefinition($n, $f, $a, $d, $desc, $c, $p)" } implicit def liftDirLocDef: Liftable[DirectiveLocation] = Liftable { - case DirectiveLocation(n, c, p) 
⇒ + case DirectiveLocation(n, c, p) => q"_root_.sangria.ast.DirectiveLocation($n, $c, $p)" } implicit def liftDefinition[T <: Definition]: Liftable[T] = Liftable { - case OperationDefinition(o, n, v, d, s, c, tc, p) ⇒ + case OperationDefinition(o, n, v, d, s, c, tc, p) => q"_root_.sangria.ast.OperationDefinition($o, $n, $v, $d, $s, $c, $tc, $p)" - case FragmentDefinition(n, t, d, s, v, c, tc, p) ⇒ + case FragmentDefinition(n, t, d, s, v, c, tc, p) => q"_root_.sangria.ast.FragmentDefinition($n, $t, $d, $s, $v, $c, $tc, $p)" - case DirectiveDefinition(n, a, l, desc, c, p) ⇒ + case DirectiveDefinition(n, a, l, desc, c, p) => q"_root_.sangria.ast.DirectiveDefinition($n, $a, $l, $desc, $c, $p)" - case SchemaDefinition(o, d, desc, c, tc, p) ⇒ + case SchemaDefinition(o, d, desc, c, tc, p) => q"_root_.sangria.ast.SchemaDefinition($o, $d, $desc, $c, $tc, $p)" - case ObjectTypeExtensionDefinition(n, i, f, d, c, tc, p) ⇒ + case ObjectTypeExtensionDefinition(n, i, f, d, c, tc, p) => q"_root_.sangria.ast.ObjectTypeExtensionDefinition($n, $i, $f, $d, $c, $tc, $p)" - case InterfaceTypeExtensionDefinition(n, f, d, c, tc, p) ⇒ + case InterfaceTypeExtensionDefinition(n, f, d, c, tc, p) => q"_root_.sangria.ast.InterfaceTypeExtensionDefinition($n, $f, $d, $c, $tc, $p)" - case InputObjectTypeExtensionDefinition(n, f, d, c, tc, p) ⇒ + case InputObjectTypeExtensionDefinition(n, f, d, c, tc, p) => q"_root_.sangria.ast.InputObjectTypeExtensionDefinition($n, $f, $d, $c, $tc, $p)" - case UnionTypeExtensionDefinition(n, t, d, c, p) ⇒ + case UnionTypeExtensionDefinition(n, t, d, c, p) => q"_root_.sangria.ast.UnionTypeExtensionDefinition($n, $t, $d, $c, $p)" - case EnumTypeExtensionDefinition(n, v, d, c, tc, p) ⇒ + case EnumTypeExtensionDefinition(n, v, d, c, tc, p) => q"_root_.sangria.ast.EnumTypeExtensionDefinition($n, $v, $d, $c, $tc, $p)" - case ScalarTypeExtensionDefinition(n, d, c, p) ⇒ + case ScalarTypeExtensionDefinition(n, d, c, p) => 
q"_root_.sangria.ast.ScalarTypeExtensionDefinition($n, $d, $c, $p)" - case SchemaExtensionDefinition(o, d, c, tc, p) ⇒ + case SchemaExtensionDefinition(o, d, c, tc, p) => q"_root_.sangria.ast.SchemaExtensionDefinition($o, $d, $c, $tc, $p)" - case EnumTypeDefinition(n, v, d, desc, c, tc, p) ⇒ + case EnumTypeDefinition(n, v, d, desc, c, tc, p) => q"_root_.sangria.ast.EnumTypeDefinition($n, $v, $d, $desc, $c, $tc, $p)" - case InputObjectTypeDefinition(n, f, d, desc, c, tc, p) ⇒ + case InputObjectTypeDefinition(n, f, d, desc, c, tc, p) => q"_root_.sangria.ast.InputObjectTypeDefinition($n, $f, $d, $desc, $c, $tc, $p)" - case InterfaceTypeDefinition(n, f, d, desc, c, tc, p) ⇒ + case InterfaceTypeDefinition(n, f, d, desc, c, tc, p) => q"_root_.sangria.ast.InterfaceTypeDefinition($n, $f, $d, $desc, $c, $tc, $p)" - case ObjectTypeDefinition(n, i, f, d, desc, c, tc, p) ⇒ + case ObjectTypeDefinition(n, i, f, d, desc, c, tc, p) => q"_root_.sangria.ast.ObjectTypeDefinition($n, $i, $f, $d, $desc, $c, $tc, $p)" - case ScalarTypeDefinition(n, d, desc, c, p) ⇒ + case ScalarTypeDefinition(n, d, desc, c, p) => q"_root_.sangria.ast.ScalarTypeDefinition($n, $d, $desc, $c, $p)" - case UnionTypeDefinition(n, t, d, desc, c, p) ⇒ + case UnionTypeDefinition(n, t, d, desc, c, p) => q"_root_.sangria.ast.UnionTypeDefinition($n, $t, $d, $desc, $c, $p)" } implicit def liftNamedValue[T <: NameValue]: Liftable[T] = Liftable { - case Argument(n, v, c, p) ⇒ q"_root_.sangria.ast.Argument($n, $v, $c, $p)" - case ObjectField(n, v, c, p) ⇒ q"_root_.sangria.ast.ObjectField($n, $v, $c, $p)" + case Argument(n, v, c, p) => q"_root_.sangria.ast.Argument($n, $v, $c, $p)" + case ObjectField(n, v, c, p) => q"_root_.sangria.ast.ObjectField($n, $v, $c, $p)" } implicit def liftValue[T <: sangria.ast.Value]: Liftable[T] = Liftable { - case IntValue(v, c, p) ⇒ q"_root_.sangria.ast.IntValue($v, $c, $p)" - case FloatValue(v, c, p) ⇒ q"_root_.sangria.ast.FloatValue($v, $c, $p)" - case StringValue(v, b, r, c, p) ⇒ 
q"_root_.sangria.ast.StringValue($v, $b, $r, $c, $p)" - case BooleanValue(v, c, p) ⇒ q"_root_.sangria.ast.BooleanValue($v, $c, $p)" - case NullValue(c, p) ⇒ q"_root_.sangria.ast.NullValue($c, $p)" - case EnumValue(v, c, p) ⇒ q"_root_.sangria.ast.EnumValue($v, $c, $p)" - case ListValue(v, c, p) ⇒ q"_root_.sangria.ast.ListValue($v, $c, $p)" - case ObjectValue(f, c, p) ⇒ q"_root_.sangria.ast.ObjectValue($f, $c, $p)" - case VariableValue(n, c, p) ⇒ q"_root_.sangria.ast.VariableValue($n, $c, $p)" - case BigIntValue(v, c, p) ⇒ + case IntValue(v, c, p) => q"_root_.sangria.ast.IntValue($v, $c, $p)" + case FloatValue(v, c, p) => q"_root_.sangria.ast.FloatValue($v, $c, $p)" + case StringValue(v, b, r, c, p) => q"_root_.sangria.ast.StringValue($v, $b, $r, $c, $p)" + case BooleanValue(v, c, p) => q"_root_.sangria.ast.BooleanValue($v, $c, $p)" + case NullValue(c, p) => q"_root_.sangria.ast.NullValue($c, $p)" + case EnumValue(v, c, p) => q"_root_.sangria.ast.EnumValue($v, $c, $p)" + case ListValue(v, c, p) => q"_root_.sangria.ast.ListValue($v, $c, $p)" + case ObjectValue(f, c, p) => q"_root_.sangria.ast.ObjectValue($f, $c, $p)" + case VariableValue(n, c, p) => q"_root_.sangria.ast.VariableValue($n, $c, $p)" + case BigIntValue(v, c, p) => q"_root_.sangria.ast.BigIntValue(_root_.scala.math.BigInt(${v.toByteArray}), $c, $p)" - case sangria.ast.BigDecimalValue(v, c, p) ⇒ + case sangria.ast.BigDecimalValue(v, c, p) => q"_root_.sangria.ast.BigDecimalValue(_root_.scala.math.BigDecimal(${v.toString()}), $c, $p)" } implicit def directive: Liftable[sangria.ast.Directive] = Liftable { - case Directive(n, a, c, p) ⇒ q"_root_.sangria.ast.Directive($n, $a, $c, $p)" + case Directive(n, a, c, p) => q"_root_.sangria.ast.Directive($n, $a, $c, $p)" } implicit def selection[T <: Selection]: Liftable[T] = Liftable { - case Field(a, n, arg, d, s, c, tc, p) ⇒ + case Field(a, n, arg, d, s, c, tc, p) => q"_root_.sangria.ast.Field($a, $n, $arg, $d, $s, $c, $tc, $p)" - case FragmentSpread(n, d, c, p) ⇒ + 
case FragmentSpread(n, d, c, p) => q"_root_.sangria.ast.FragmentSpread($n, $d, $c, $p)" - case InlineFragment(t, d, s, c, tc, p) ⇒ + case InlineFragment(t, d, s, c, tc, p) => q"_root_.sangria.ast.InlineFragment($t, $d, $s, $c, $tc, $p)" } implicit def liftDocument: Liftable[Document] = Liftable { - case doc @ Document(d, c, p, _) ⇒ q"_root_.sangria.ast.Document($d, $c, $p, _root_.scala.Some(new _root_.sangria.parser.DefaultSourceMapper(${doc.sourceMapper.get.id}, _root_.org.parboiled2.ParserInput(${doc.source.get}))))" + case doc @ Document(d, c, p, _) => q"_root_.sangria.ast.Document($d, $c, $p, _root_.scala.Some(new _root_.sangria.parser.DefaultSourceMapper(${doc.sourceMapper.get.id}, _root_.org.parboiled2.ParserInput(${doc.source.get}))))" } implicit def liftInputDocument: Liftable[InputDocument] = Liftable { - case doc @ InputDocument(d, c, p, _) ⇒ q"_root_.sangria.ast.InputDocument($d, $c, $p, _root_.scala.Some(new _root_.sangria.parser.DefaultSourceMapper(${doc.sourceMapper.get.id}, _root_.org.parboiled2.ParserInput(${doc.source.get}))))" + case doc @ InputDocument(d, c, p, _) => q"_root_.sangria.ast.InputDocument($d, $c, $p, _root_.scala.Some(new _root_.sangria.parser.DefaultSourceMapper(${doc.sourceMapper.get.id}, _root_.org.parboiled2.ParserInput(${doc.source.get}))))" } } trait MacroAstLiftable extends AstLiftable { val c: blackbox.Context val universe: c.universe.type = c.universe -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/macros/ParseMacro.scala b/src/main/scala/sangria/macros/ParseMacro.scala index f7cc6d07..a097b983 100644 --- a/src/main/scala/sangria/macros/ParseMacro.scala +++ b/src/main/scala/sangria/macros/ParseMacro.scala @@ -17,15 +17,15 @@ class ParseMacro(context: blackbox.Context) extends { c.prefix.tree match { // Expects a string interpolation that doesn't contain any // expressions, thus containing only a single tree - case Apply(_, List(Apply(_, t :: Nil))) ⇒ + case Apply(_, List(Apply(_, t :: Nil))) => val 
q"${gql: String}" = t try { q"${QueryParser.parse(gql).get}" } catch { - case error: SyntaxError ⇒ syntaxError(error) + case error: SyntaxError => syntaxError(error) } - case _ ⇒ + case _ => c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.") } @@ -36,15 +36,15 @@ class ParseMacro(context: blackbox.Context) extends { c.prefix.tree match { // Expects a string interpolation that doesn't contain any // expressions, thus containing only a single tree - case Apply(_, List(Apply(_, t :: Nil))) ⇒ + case Apply(_, List(Apply(_, t :: Nil))) => val q"${gql: String}" = t try { q"${QueryParser.parseInput(gql).get}" } catch { - case error: SyntaxError ⇒ syntaxError(error) + case error: SyntaxError => syntaxError(error) } - case _ ⇒ + case _ => c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.") } @@ -55,15 +55,15 @@ class ParseMacro(context: blackbox.Context) extends { c.prefix.tree match { // Expects a string interpolation that doesn't contain any // expressions, thus containing only a single tree - case Apply(_, List(Apply(_, t :: Nil))) ⇒ + case Apply(_, List(Apply(_, t :: Nil))) => val q"${gql: String}" = t try { q"${QueryParser.parseInputDocument(gql).get}" } catch { - case error: SyntaxError ⇒ syntaxError(error) + case error: SyntaxError => syntaxError(error) } - case _ ⇒ + case _ => c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.") } diff --git a/src/main/scala/sangria/macros/derive/DeriveEnumSetting.scala b/src/main/scala/sangria/macros/derive/DeriveEnumSetting.scala index 543ded04..63733bfd 100644 --- a/src/main/scala/sangria/macros/derive/DeriveEnumSetting.scala +++ b/src/main/scala/sangria/macros/derive/DeriveEnumSetting.scala @@ -15,4 +15,4 @@ case class RenameValue(value: String, graphqlName: String) extends DeriveEnumSet case class IncludeValues(values: String*) extends DeriveEnumSetting case class ExcludeValues(fieldNames: String*) extends DeriveEnumSetting -case class TransformValueNames(transformer: String ⇒ 
String) extends DeriveEnumSetting \ No newline at end of file +case class TransformValueNames(transformer: String => String) extends DeriveEnumSetting diff --git a/src/main/scala/sangria/macros/derive/DeriveEnumTypeMacro.scala b/src/main/scala/sangria/macros/derive/DeriveEnumTypeMacro.scala index 95c0e947..83fed81e 100644 --- a/src/main/scala/sangria/macros/derive/DeriveEnumTypeMacro.scala +++ b/src/main/scala/sangria/macros/derive/DeriveEnumTypeMacro.scala @@ -11,30 +11,30 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { def deriveEnumType[T : WeakTypeTag](config: Tree*) = { val t = weakTypeTag[T] val validatedConfig = validateEnumConfig(config) - val errors = validatedConfig.collect {case Left(error) ⇒ error} + val errors = validatedConfig.collect {case Left(error) => error} if (errors.nonEmpty) reportErrors(errors) else { - val validConfig = validatedConfig.collect {case Right(cfg) ⇒ cfg} + val validConfig = validatedConfig.collect {case Right(cfg) => cfg} val (tpe, validatedValues) = if (t.tpe <:< typeOf[Enumeration#Value]) - t.tpe.asInstanceOf[TypeRef].pre → Right(collectEnumerationValues(t.tpe)) + t.tpe.asInstanceOf[TypeRef].pre -> Right(collectEnumerationValues(t.tpe)) else - t.tpe → collectKnownEnumSubtypes(t.tpe.typeSymbol) + t.tpe -> collectKnownEnumSubtypes(t.tpe.typeSymbol) validatedValues match { - case Left(error) ⇒ reportErrors(error :: Nil) - case Right(values) ⇒ + case Left(error) => reportErrors(error :: Nil) + case Right(values) => validateEnumValueConfig(values, validConfig) match { - case Nil ⇒ + case Nil => val tpeName = q"${tpe.typeSymbol.name.decodedName.toString}" val annotationName = symbolName(tpe.typeSymbol.annotations) - val configName = validConfig.collect{case MacroEnumTypeName(name) ⇒ name}.lastOption + val configName = validConfig.collect{case MacroEnumTypeName(name) => name}.lastOption val annotationDesc = symbolDescription(tpe.typeSymbol.annotations) - val configDesc = validConfig.collect{case 
MacroEnumTypeDescription(name) ⇒ name}.lastOption + val configDesc = validConfig.collect{case MacroEnumTypeDescription(name) => name}.lastOption val enumValues = collectEnumValues(values, validConfig, t.tpe) @@ -44,7 +44,7 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { ${configDesc orElse annotationDesc}, $enumValues) """ - case configErrors ⇒ reportErrors(configErrors) + case configErrors => reportErrors(configErrors) } } @@ -53,7 +53,7 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { private def collectEnumerationValues(tpe: Type): List[Symbol] = tpe.asInstanceOf[TypeRef].pre.members - .filter(s ⇒ s.isTerm && !(s.isMethod || s.isModule || s.isClass) && (s.typeSignature.resultType <:< typeOf[Enumeration#Value])) + .filter(s => s.isTerm && !(s.isMethod || s.isModule || s.isClass) && (s.typeSignature.resultType <:< typeOf[Enumeration#Value])) .toList private def collectKnownEnumSubtypes(s: Symbol): Either[(Position, String), List[Symbol]] = @@ -63,28 +63,28 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { if ((cs.isTrait || cs.isAbstract) && cs.isSealed) cs.knownDirectSubclasses.foldLeft(Right(Nil): Either[(Position, String), List[Symbol]]) { - case (Left(error), _) ⇒ Left(error) - case (Right(set), knownSubclass) ⇒ + case (Left(error), _) => Left(error) + case (Right(set), knownSubclass) => collectKnownEnumSubtypes(knownSubclass) match { - case Left(error) ⇒ Left(error) - case Right(subset) ⇒ Right(set ++ subset) + case Left(error) => Left(error) + case Right(subset) => Right(set ++ subset) } } - else Left(cs.pos → "Only `Enumeration` and sealed hierarchies of case objects are supported for GraphQL EnumType derivation.") - } else Left(c.enclosingPosition → "Only `Enumeration` and sealed hierarchies of case objects are supported for GraphQL EnumType derivation.") + else Left(cs.pos -> "Only `Enumeration` and sealed hierarchies of case objects are supported for GraphQL EnumType derivation.") + } else 
Left(c.enclosingPosition -> "Only `Enumeration` and sealed hierarchies of case objects are supported for GraphQL EnumType derivation.") private def collectEnumValues(values: List[Symbol], config: Seq[MacroDeriveEnumSetting], t: Type): List[Tree] = { val extractedValues = extractEnumValues(values, config) - if (extractedValues.isEmpty) reportErrors(List(c.enclosingPosition → "Enum value list is empty")) + if (extractedValues.isEmpty) reportErrors(List(c.enclosingPosition -> "Enum value list is empty")) else - extractedValues map { value ⇒ + extractedValues map { value => val name = value.name.decodedName.toString.trim val annotationName = symbolName(value.annotations) - val configName = config.collect{case MacroRenameValue(`name`, tree, _) ⇒ tree}.lastOption + val configName = config.collect{case MacroRenameValue(`name`, tree, _) => tree}.lastOption val actualName = { val nonTransformedName = configName orElse annotationName getOrElse q"$name" - val transformFnOpt = config.collect{case MacroTransformValueNames(fn) ⇒ fn}.lastOption + val transformFnOpt = config.collect{case MacroTransformValueNames(fn) => fn}.lastOption val upperCase = config.exists(_.isInstanceOf[MacroUppercaseValues]) val transformed = transformFnOpt.map(fn => q"$fn($nonTransformedName)").getOrElse(nonTransformedName) @@ -95,11 +95,11 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { } val annotationDescr = symbolDescription(value.annotations) - val configDescr = config.collect{case MacroDocumentValue(`name`, tree, _, _) ⇒ tree}.lastOption + val configDescr = config.collect{case MacroDocumentValue(`name`, tree, _, _) => tree}.lastOption val annotationDepr = symbolDeprecation(value.annotations) - val configDocDepr = config.collect{case MacroDocumentValue(`name`, _, reason, _) ⇒ reason}.lastOption getOrElse q"None" - val configDepr = config.collect{case MacroDeprecateValue(`name`, reason, _) ⇒ reason}.lastOption getOrElse q"None" + val configDocDepr = config.collect{case 
MacroDocumentValue(`name`, _, reason, _) => reason}.lastOption getOrElse q"None" + val configDepr = config.collect{case MacroDeprecateValue(`name`, reason, _) => reason}.lastOption getOrElse q"None" val actualValue = { if (value.isModuleClass) { @@ -123,90 +123,90 @@ class DeriveEnumTypeMacro(context: blackbox.Context) extends { private def extractEnumValues(values: List[Symbol], config: Seq[MacroDeriveEnumSetting]) = { val included = config.foldLeft(Set.empty[String]){ - case (acc, MacroIncludeValues(vals, _)) ⇒ acc ++ vals - case (acc, _) ⇒ acc + case (acc, MacroIncludeValues(vals, _)) => acc ++ vals + case (acc, _) => acc } val excluded = config.foldLeft(Set.empty[String]){ - case (acc, MacroExcludeValues(vals, _)) ⇒ acc ++ vals - case (acc, _) ⇒ acc + case (acc, MacroExcludeValues(vals, _)) => acc ++ vals + case (acc, _) => acc } val actualIncluded = if (included.nonEmpty) included - else values.map(m ⇒ m.name.decodedName.toString.trim).toSet + else values.map(m => m.name.decodedName.toString.trim).toSet val actualFields = actualIncluded -- excluded - values.filter(m ⇒ actualFields.contains(m.name.decodedName.toString.trim) && !memberExcluded(m.annotations)) + values.filter(m => actualFields.contains(m.name.decodedName.toString.trim) && !memberExcluded(m.annotations)) } private def validateEnumValueConfig(knownMembers: List[Symbol], config: Seq[MacroDeriveEnumSetting]) = { val knownMembersSet = knownMembers.map(_.name.decodedName.toString.trim).toSet def unknownMember(pos: Position, name: String) = - pos → s"Unknown enum value '$name'. Known members are: ${knownMembers map (_.name.decodedName.toString) mkString ", "}" + pos -> s"Unknown enum value '$name'. 
Known members are: ${knownMembers map (_.name.decodedName.toString) mkString ", "}" val valueValidations = config.toList.flatMap { - case MacroIncludeValues(values, pos) if !values.forall(knownMembersSet.contains) ⇒ + case MacroIncludeValues(values, pos) if !values.forall(knownMembersSet.contains) => val unknown = values.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroExcludeValues(values, pos) if !values.forall(knownMembersSet.contains) ⇒ + case MacroExcludeValues(values, pos) if !values.forall(knownMembersSet.contains) => val unknown = values.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroDocumentValue(value, _, _, pos) if !knownMembersSet.contains(value) ⇒ + case MacroDocumentValue(value, _, _, pos) if !knownMembersSet.contains(value) => unknownMember(pos, value) :: Nil - case MacroRenameValue(value, _, pos) if !knownMembersSet.contains(value) ⇒ + case MacroRenameValue(value, _, pos) if !knownMembersSet.contains(value) => unknownMember(pos, value) :: Nil - case MacroDeprecateValue(value, _, pos) if !knownMembersSet.contains(value) ⇒ + case MacroDeprecateValue(value, _, pos) if !knownMembersSet.contains(value) => unknownMember(pos, value) :: Nil - case _ ⇒ Nil + case _ => Nil } - config.collect{case MacroUppercaseValues(pos) ⇒ pos}.lastOption match { - case Some(pos) if config.exists(_.isInstanceOf[MacroRenameValue]) ⇒ - valueValidations :+ (pos → "`UppercaseValues` is used together with `RenameValue` which is not allowed.") - case _ ⇒ + config.collect{case MacroUppercaseValues(pos) => pos}.lastOption match { + case Some(pos) if config.exists(_.isInstanceOf[MacroRenameValue]) => + valueValidations :+ (pos -> "`UppercaseValues` is used together with `RenameValue` which is not allowed.") + case _ => valueValidations } } private def validateEnumConfig(config: Seq[Tree]) = config.map { - case q"$setting.apply($name)" if checkSetting[EnumTypeName.type](setting) ⇒ + case q"$setting.apply($name)" if 
checkSetting[EnumTypeName.type](setting) => Right(MacroEnumTypeName(name)) - case q"$setting.apply($description)" if checkSetting[EnumTypeDescription.type](setting) ⇒ + case q"$setting.apply($description)" if checkSetting[EnumTypeDescription.type](setting) => Right(MacroEnumTypeDescription(description)) - case tree @ q"$setting" if checkSetting[UppercaseValues.type](setting) ⇒ + case tree @ q"$setting" if checkSetting[UppercaseValues.type](setting) => Right(MacroUppercaseValues(tree.pos)) - case tree @ q"$setting.apply(${value: String}, $description, $deprecationReason)" if checkSetting[DocumentValue.type](setting) ⇒ + case tree @ q"$setting.apply(${value: String}, $description, $deprecationReason)" if checkSetting[DocumentValue.type](setting) => Right(MacroDocumentValue(value, description, deprecationReason, tree.pos)) - case tree @ q"$setting.apply(${value: String}, $graphqlName)" if checkSetting[RenameValue.type](setting) ⇒ + case tree @ q"$setting.apply(${value: String}, $graphqlName)" if checkSetting[RenameValue.type](setting) => Right(MacroRenameValue(value, graphqlName, tree.pos)) - case tree @ q"$setting.apply(${value: String}, $deprecationReason)" if checkSetting[DeprecateValue.type](setting) ⇒ + case tree @ q"$setting.apply(${value: String}, $deprecationReason)" if checkSetting[DeprecateValue.type](setting) => Right(MacroDeprecateValue(value, q"Some($deprecationReason)", tree.pos)) - case tree @ q"$setting.apply(..${values: List[String]})" if checkSetting[IncludeValues.type](setting) ⇒ + case tree @ q"$setting.apply(..${values: List[String]})" if checkSetting[IncludeValues.type](setting) => Right(MacroIncludeValues(values.toSet, tree.pos)) - case tree @ q"$setting.apply(..${values: List[String]})" if checkSetting[ExcludeValues.type](setting) ⇒ + case tree @ q"$setting.apply(..${values: List[String]})" if checkSetting[ExcludeValues.type](setting) => Right(MacroExcludeValues(values.toSet, tree.pos)) - case tree @ q"$setting.apply($fn)" if 
checkSetting[TransformValueNames.type](setting) ⇒ + case tree @ q"$setting.apply($fn)" if checkSetting[TransformValueNames.type](setting) => Right(MacroTransformValueNames(fn)) - case tree ⇒ Left(tree.pos → + case tree => Left(tree.pos -> "Unsupported shape of derivation config. Please define subclasses of `DeriveEnumTypeConfig` directly in the argument list of the macro.") } diff --git a/src/main/scala/sangria/macros/derive/DeriveInputObjectSetting.scala b/src/main/scala/sangria/macros/derive/DeriveInputObjectSetting.scala index b6834be7..6bb639f9 100644 --- a/src/main/scala/sangria/macros/derive/DeriveInputObjectSetting.scala +++ b/src/main/scala/sangria/macros/derive/DeriveInputObjectSetting.scala @@ -15,4 +15,4 @@ case class ReplaceInputField(fieldName: String, field: InputField[_]) extends De case class IncludeInputFields(fieldNames: String*) extends DeriveInputObjectSetting case class ExcludeInputFields(fieldNames: String*) extends DeriveInputObjectSetting -case class TransformInputFieldNames(transformer: String ⇒ String) extends DeriveInputObjectSetting \ No newline at end of file +case class TransformInputFieldNames(transformer: String => String) extends DeriveInputObjectSetting diff --git a/src/main/scala/sangria/macros/derive/DeriveInputObjectTypeMacro.scala b/src/main/scala/sangria/macros/derive/DeriveInputObjectTypeMacro.scala index 8d90d27e..904ce72b 100644 --- a/src/main/scala/sangria/macros/derive/DeriveInputObjectTypeMacro.scala +++ b/src/main/scala/sangria/macros/derive/DeriveInputObjectTypeMacro.scala @@ -11,28 +11,28 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { val targetType = weakTypeTag[T].tpe val validatedConfig = validateObjectConfig(config, targetType) - val errors = validatedConfig.collect {case Left(error) ⇒ error} + val errors = validatedConfig.collect {case Left(error) => error} if (errors.nonEmpty) reportErrors(errors) else { - val validConfig = validatedConfig.collect {case Right(cfg) ⇒ cfg} + val 
validConfig = validatedConfig.collect {case Right(cfg) => cfg} collectFields(validConfig, targetType) match { - case Left(errors) ⇒ reportErrors(errors) - case Right(fields) ⇒ + case Left(errors) => reportErrors(errors) + case Right(fields) => val tpeName = q"${targetType.typeSymbol.name.decodedName.toString}" val annotationName = symbolName(targetType.typeSymbol.annotations) - val configName = validConfig.collect{case MacroName(name) ⇒ name}.lastOption + val configName = validConfig.collect{case MacroName(name) => name}.lastOption val annotationDesc = symbolDescription(targetType.typeSymbol.annotations) - val configDesc = validConfig.collect{case MacroDescription(name) ⇒ name}.lastOption + val configDesc = validConfig.collect{case MacroDescription(name) => name}.lastOption q""" sangria.schema.InputObjectType.createFromMacro[$targetType]( ${configName orElse annotationName getOrElse tpeName}, ${configDesc orElse annotationDesc}, - () ⇒ $fields) + () => $fields) """ } } @@ -40,53 +40,53 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { private def findApplyMethod(tpe: Type): Either[(Position, String), Option[(Type, MethodSymbol)]] = if (tpe.companion =:= NoType) { - Left(c.enclosingPosition → s"Can't find companion object for '$tpe'. This can happen when it's nested too deeply. Please consider defining it as a top-level object or directly inside of another class or object.") + Left(c.enclosingPosition -> s"Can't find companion object for '$tpe'. This can happen when it's nested too deeply. 
Please consider defining it as a top-level object or directly inside of another class or object.") } else { val applyMethods = tpe.companion.members.collect { - case m: MethodSymbol if m.name.decodedName.toString == "apply" ⇒ m + case m: MethodSymbol if m.name.decodedName.toString == "apply" => m } if (applyMethods.size > 1) - Left(c.enclosingPosition → "Companion object has more than one `apply` method, which is not supported.") + Left(c.enclosingPosition -> "Companion object has more than one `apply` method, which is not supported.") else - Right(Some(tpe.companion → applyMethods.head)) + Right(Some(tpe.companion -> applyMethods.head)) } private def collectFields(config: Seq[MacroSetting], targetType: Type): Either[List[(Position, String)], List[Tree]] = findApplyMethod(targetType) match { - case Right(apply) ⇒ + case Right(apply) => val knownMembers = findKnownMembers(targetType, apply) validateFieldConfig(knownMembers, config) match { - case Nil ⇒ + case Nil => val fields = extractFields(knownMembers, config) - val classFields = fields map { field ⇒ + val classFields = fields map { field => val fieldType = field.method.returnType val name = field.name val annotationName = symbolName(field.annotations) - val configName = config.collect{case MacroRenameField(`name`, tree, _) ⇒ tree}.lastOption + val configName = config.collect{case MacroRenameField(`name`, tree, _) => tree}.lastOption val annotationDescr = symbolDescription(field.annotations) - val configDescr = config.collect{case MacroDocumentField(`name`, tree, _) ⇒ tree}.lastOption + val configDescr = config.collect{case MacroDocumentField(`name`, tree, _) => tree}.lastOption val defaultAnnotation = symbolDefault(field.annotations) - val defaultSig = field.defaultValue.map {case (comp, defaultName) ⇒ q"${comp.typeSymbol.name.toTermName}.$defaultName"} + val defaultSig = field.defaultValue.map {case (comp, defaultName) => q"${comp.typeSymbol.name.toTermName}.$defaultName"} val default = defaultAnnotation 
orElse defaultSig val fieldName: c.universe.Tree = { val nonTransformedName = configName orElse annotationName getOrElse q"$name" - config.collect{case MacroTransformFieldNames(fnt) ⇒ fnt}.lastOption match { - case Some(fnt) ⇒ q"$fnt($nonTransformedName)" - case None ⇒ nonTransformedName + config.collect{case MacroTransformFieldNames(fnt) => fnt}.lastOption match { + case Some(fnt) => q"$fnt($nonTransformedName)" + case None => nonTransformedName } } default match { - case Some(d) ⇒ + case Some(d) => val ft = if (fieldType.erasure <:< typeOf[Option[_]].erasure) q"sangria.macros.derive.GraphQLInputTypeLookup.finder[$fieldType]().graphqlType" @@ -100,7 +100,7 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { ${configDescr orElse annotationDescr}, $d) """ - case None ⇒ + case None => q""" sangria.schema.InputField.createFromMacroWithoutDefault( $fieldName, @@ -114,15 +114,15 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { val allFields = classFields ++ additionalFields(config) if (allFields.nonEmpty) Right(allFields) - else Left(List(c.enclosingPosition → "Input field list is empty")) - case errors ⇒ Left(errors) + else Left(List(c.enclosingPosition -> "Input field list is empty")) + case errors => Left(errors) } - case Left(error) ⇒ reportErrors(error :: Nil) + case Left(error) => reportErrors(error :: Nil) } private def findKnownMembers(tpe: Type, apply: Option[(Type, MethodSymbol)]): List[KnownMember] = tpe.members.collect { - case m: MethodSymbol if m.isCaseAccessor ⇒ + case m: MethodSymbol if m.isCaseAccessor => val (annotations, default) = findCaseClassAccessorAnnotations(tpe, m, apply) KnownMember(tpe, m, annotations, default) @@ -130,111 +130,111 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { private def findCaseClassAccessorAnnotations(tpe: Type, member: MethodSymbol, applyInfo: Option[(Type, MethodSymbol)]): (List[Annotation], Option[(Type, TermName)]) = applyInfo match { - case 
Some((companion, apply)) ⇒ + case Some((companion, apply)) => val annotationsConstructors = for { - c ← tpe.members.filter(_.isConstructor) - pl ← c.asMethod.paramLists - p ← pl + c <- tpe.members.filter(_.isConstructor) + pl <- c.asMethod.paramLists + p <- pl if p.name.decodedName.toString == member.name.decodedName.toString } yield p.annotations val defaults = apply.paramLists.flatten.zipWithIndex.find(_._1.name.decodedName.toString == member.name.decodedName.toString) match { - case Some((param: TermSymbol, idx)) if param.isParamWithDefault ⇒ - Some(companion → defaultMethodArgValue(apply.name.decodedName.toString, idx + 1).asInstanceOf[TermName]) - case _ ⇒ None + case Some((param: TermSymbol, idx)) if param.isParamWithDefault => + Some(companion -> defaultMethodArgValue(apply.name.decodedName.toString, idx + 1).asInstanceOf[TermName]) + case _ => None } - annotationsConstructors.toList.flatten → defaults + annotationsConstructors.toList.flatten -> defaults - case None ⇒ - Nil → None + case None => + Nil -> None } private def extractFields(knownMembers: List[KnownMember], config: Seq[MacroSetting]) = { val included = config.foldLeft(Set.empty[String]){ - case (acc, MacroIncludeFields(fields, _)) ⇒ acc ++ fields - case (acc, _) ⇒ acc + case (acc, MacroIncludeFields(fields, _)) => acc ++ fields + case (acc, _) => acc } val excluded = config.foldLeft(Set.empty[String]){ - case (acc, MacroExcludeFields(fields, _)) ⇒ acc ++ fields - case (acc, MacroReplaceField(fieldName, _, _)) ⇒ acc + fieldName - case (acc, _) ⇒ acc + case (acc, MacroExcludeFields(fields, _)) => acc ++ fields + case (acc, MacroReplaceField(fieldName, _, _)) => acc + fieldName + case (acc, _) => acc } val actualIncluded = if (included.nonEmpty) included - else knownMembers.map(m ⇒ m.name).toSet + else knownMembers.map(m => m.name).toSet val actualFields = actualIncluded -- excluded - knownMembers.filter(m ⇒ actualFields.contains(m.name) && !memberExcluded(m.annotations)) + knownMembers.filter(m => 
actualFields.contains(m.name) && !memberExcluded(m.annotations)) } private def additionalFields(config: Seq[MacroSetting]) = config.foldLeft(List[Tree]()){ - case (acc, MacroReplaceField(_, field, _)) ⇒ acc :+ field - case (acc, _) ⇒ acc + case (acc, MacroReplaceField(_, field, _)) => acc :+ field + case (acc, _) => acc } private def validateFieldConfig(knownMembers: List[KnownMember], config: Seq[MacroSetting]) = { val knownMembersSet = knownMembers.map(_.name).toSet def unknownMember(pos: Position, name: String) = - pos → s"Unknown member '$name'. Known members are: ${knownMembers map (_.name) mkString ", "}" + pos -> s"Unknown member '$name'. Known members are: ${knownMembers map (_.name) mkString ", "}" config.toList.flatMap { - case MacroIncludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) ⇒ + case MacroIncludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) => val unknown = fields.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroExcludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) ⇒ + case MacroExcludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) => val unknown = fields.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroDocumentField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroDocumentField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroRenameField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroRenameField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroReplaceField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroReplaceField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case _ ⇒ Nil + case _ => Nil } } private def validateObjectConfig(config: Seq[Tree], tpe: Type) = config.map { - 
case q"$setting.apply($name)" if checkSetting[InputObjectTypeName.type](setting) ⇒ + case q"$setting.apply($name)" if checkSetting[InputObjectTypeName.type](setting) => Right(MacroName(name)) - case q"$setting.apply($description)" if checkSetting[InputObjectTypeDescription.type](setting) ⇒ + case q"$setting.apply($description)" if checkSetting[InputObjectTypeDescription.type](setting) => Right(MacroDescription(description)) - case tree @ q"$setting.apply(${fieldName: String}, $description)" if checkSetting[DocumentInputField.type](setting) ⇒ + case tree @ q"$setting.apply(${fieldName: String}, $description)" if checkSetting[DocumentInputField.type](setting) => Right(MacroDocumentField(fieldName, description, tree.pos)) - case tree @ q"$setting.apply(${fieldName: String}, $graphqlName)" if checkSetting[RenameInputField.type](setting) ⇒ + case tree @ q"$setting.apply(${fieldName: String}, $graphqlName)" if checkSetting[RenameInputField.type](setting) => Right(MacroRenameField(fieldName, graphqlName, tree.pos)) - case tree @ q"$setting.apply(${fieldName: String}, $field)" if checkSetting[ReplaceInputField.type](setting) ⇒ + case tree @ q"$setting.apply(${fieldName: String}, $field)" if checkSetting[ReplaceInputField.type](setting) => Right(MacroReplaceField(fieldName, field, tree.pos)) - case tree @ q"$setting.apply(..${fields: List[String]})" if checkSetting[IncludeInputFields.type](setting) ⇒ + case tree @ q"$setting.apply(..${fields: List[String]})" if checkSetting[IncludeInputFields.type](setting) => Right(MacroIncludeFields(fields.toSet, tree.pos)) - case tree @ q"$setting.apply(..${fields: List[String]})" if checkSetting[ExcludeInputFields.type](setting) ⇒ + case tree @ q"$setting.apply(..${fields: List[String]})" if checkSetting[ExcludeInputFields.type](setting) => Right(MacroExcludeFields(fields.toSet, tree.pos)) - case q"$setting.apply($fn)" if checkSetting[TransformInputFieldNames.type](setting) ⇒ + case q"$setting.apply($fn)" if 
checkSetting[TransformInputFieldNames.type](setting) => Right(MacroTransformFieldNames(fn)) - case tree ⇒ Left(tree.pos → + case tree => Left(tree.pos -> "Unsupported shape of derivation config. Please define subclasses of `DeriveInputObjectTypeSetting` directly in the argument list of the macro.") } @@ -259,4 +259,4 @@ class DeriveInputObjectTypeMacro(context: blackbox.Context) extends { case class MacroIncludeFields(fieldNames: Set[String], pos: Position) extends MacroSetting case class MacroExcludeFields(fieldNames: Set[String], pos: Position) extends MacroSetting case class MacroTransformFieldNames(transformer: Tree) extends MacroSetting -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/macros/derive/DeriveMacroSupport.scala b/src/main/scala/sangria/macros/derive/DeriveMacroSupport.scala index 87457afa..24139b32 100644 --- a/src/main/scala/sangria/macros/derive/DeriveMacroSupport.scala +++ b/src/main/scala/sangria/macros/derive/DeriveMacroSupport.scala @@ -14,7 +14,7 @@ trait DeriveMacroSupport { val (lastPos, lastError) = errors.last - errors.dropRight(1).foreach{case (pos, error) ⇒ c.error(pos, error)} + errors.dropRight(1).foreach{case (pos, error) => c.error(pos, error)} c.abort(lastPos, lastError) } @@ -22,41 +22,41 @@ trait DeriveMacroSupport { protected def symbolName(annotations: List[Annotation]): Option[Tree] = annotations .map (_.tree) - .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLName] ⇒ arg} + .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLName] => arg} .headOption protected def symbolDescription(annotations: List[Annotation]): Option[Tree] = annotations .map (_.tree) - .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLDescription] ⇒ arg} + .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLDescription] => arg} .headOption protected def symbolDefault(annotations: List[Annotation]): Option[Tree] = annotations .map (_.tree) - .collect {case q"new $name($arg)" if 
name.tpe =:= typeOf[GraphQLDefault] ⇒ arg} + .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLDefault] => arg} .headOption protected def symbolDeprecation(annotations: List[Annotation]): Option[Tree] = annotations .map (_.tree) - .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLDeprecated] ⇒ arg} + .collect {case q"new $name($arg)" if name.tpe =:= typeOf[GraphQLDeprecated] => arg} .headOption protected def symbolFieldTags(annotations: List[Annotation]): Tree = annotations .map (_.tree) .foldLeft(q"List[sangria.execution.FieldTag]()") { - case (acc, q"new $name(..$fieldTags)") if name.tpe =:= typeOf[GraphQLFieldTags] ⇒ + case (acc, q"new $name(..$fieldTags)") if name.tpe =:= typeOf[GraphQLFieldTags] => q"$acc ++ $fieldTags" - case (acc, _) ⇒ acc + case (acc, _) => acc } protected def memberExcluded(annotations: List[Annotation]): Boolean = - annotations.find(_.tree.tpe =:= typeOf[GraphQLExclude]).fold(false)(_ ⇒ true) + annotations.find(_.tree.tpe =:= typeOf[GraphQLExclude]).fold(false)(_ => true) protected def memberField(annotations: List[Annotation]): Boolean = - annotations.find(_.tree.tpe =:= typeOf[GraphQLField]).fold(false)(_ ⇒ true) + annotations.find(_.tree.tpe =:= typeOf[GraphQLField]).fold(false)(_ => true) // TODO: most probably not needed, so should be removed in future protected def defaultMethodArgValue(method: String, pos: Int) = { diff --git a/src/main/scala/sangria/macros/derive/DeriveObjectSetting.scala b/src/main/scala/sangria/macros/derive/DeriveObjectSetting.scala index 1091f196..462f98a9 100644 --- a/src/main/scala/sangria/macros/derive/DeriveObjectSetting.scala +++ b/src/main/scala/sangria/macros/derive/DeriveObjectSetting.scala @@ -13,7 +13,7 @@ case class DocumentField[Ctx, Val](fieldName: String, description: String, depre case class DeprecateField[Ctx, Val](fieldName: String, deprecationReason: String) extends DeriveObjectSetting[Ctx, Val] case class RenameField[Ctx, Val](fieldName: String, graphqlName: 
String) extends DeriveObjectSetting[Ctx, Val] case class FieldTags[Ctx, Val](fieldName: String, tags: FieldTag*) extends DeriveObjectSetting[Ctx, Val] -case class FieldComplexity[Ctx, Val](fieldName: String, complexity: (Ctx, Args, Double) ⇒ Double) extends DeriveObjectSetting[Ctx, Val] +case class FieldComplexity[Ctx, Val](fieldName: String, complexity: (Ctx, Args, Double) => Double) extends DeriveObjectSetting[Ctx, Val] case class IncludeFields[Ctx, Val](fieldNames: String*) extends DeriveObjectSetting[Ctx, Val] case class IncludeMethods[Ctx, Val](methodNames: String*) extends DeriveObjectSetting[Ctx, Val] @@ -21,10 +21,10 @@ case class ExcludeFields[Ctx, Val](fieldNames: String*) extends DeriveObjectSett case class ReplaceField[Ctx, Val](fieldName: String, field: Field[Ctx, Val]) extends DeriveObjectSetting[Ctx, Val] case class AddFields[Ctx, Val](fields: Field[Ctx, Val]*) extends DeriveObjectSetting[Ctx, Val] -case class TransformFieldNames[Ctx, Val](transformer: String ⇒ String) extends DeriveObjectSetting[Ctx, Val] +case class TransformFieldNames[Ctx, Val](transformer: String => String) extends DeriveObjectSetting[Ctx, Val] case class MethodArgumentRename[Ctx, Val](methodName: String, argName: String, newName: String) extends DeriveObjectSetting[Ctx, Val] case class MethodArgumentDescription[Ctx, Val](methodName: String, argName: String, description: String) extends DeriveObjectSetting[Ctx, Val] case class MethodArgumentsDescription[Ctx, Val](methodName: String, descriptions: (String, String)*) extends DeriveObjectSetting[Ctx, Val] case class MethodArgumentDefault[Ctx, Val, Arg](methodName: String, argName: String, default: Arg) extends DeriveObjectSetting[Ctx, Val] -case class MethodArgument[Ctx, Val, Arg](methodName: String, argName: String, description: String, default: Arg) extends DeriveObjectSetting[Ctx, Val] \ No newline at end of file +case class MethodArgument[Ctx, Val, Arg](methodName: String, argName: String, description: String, default: Arg) 
extends DeriveObjectSetting[Ctx, Val] diff --git a/src/main/scala/sangria/macros/derive/DeriveObjectTypeMacro.scala b/src/main/scala/sangria/macros/derive/DeriveObjectTypeMacro.scala index cf5e1de7..d8f3b74b 100644 --- a/src/main/scala/sangria/macros/derive/DeriveObjectTypeMacro.scala +++ b/src/main/scala/sangria/macros/derive/DeriveObjectTypeMacro.scala @@ -16,7 +16,7 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { val ctxVal = weakTypeTag[CtxVal] val v = weakTypeTag[Val] - deriveObjectType(ctx.tpe, Some(ctxVal.tpe → fn), v.tpe, config) + deriveObjectType(ctx.tpe, Some(ctxVal.tpe -> fn), v.tpe, config) } def deriveNormalObjectType[Ctx : WeakTypeTag, Val : WeakTypeTag](config: Tree*) = { @@ -30,36 +30,36 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { val targetType = ctxValType.fold(valType)(_._1) val validatedConfig = validateObjectConfig(config, targetType) - val errors = validatedConfig.collect {case Left(error) ⇒ error} + val errors = validatedConfig.collect {case Left(error) => error} if (errors.nonEmpty) reportErrors(errors) else { - val validConfig = validatedConfig.collect {case Right(cfg) ⇒ cfg} + val validConfig = validatedConfig.collect {case Right(cfg) => cfg} collectFields(validConfig, ctxType, targetType, valType, ctxValType.isDefined) match { - case Left(errors) ⇒ reportErrors(errors) - case Right(fields) ⇒ + case Left(errors) => reportErrors(errors) + case Right(fields) => val tpeName = q"${targetType.typeSymbol.name.decodedName.toString}" val annotationName = symbolName(targetType.typeSymbol.annotations) - val configName = validConfig.collect{case MacroName(name) ⇒ name}.lastOption + val configName = validConfig.collect{case MacroName(name) => name}.lastOption val annotationDesc = symbolDescription(targetType.typeSymbol.annotations) - val configDesc = validConfig.collect{case MacroDescription(name) ⇒ name}.lastOption + val configDesc = validConfig.collect{case MacroDescription(name) => name}.lastOption val 
interfaces = validConfig.foldLeft(List[Tree]()) { - case (acc, MacroInterfaces(tree)) ⇒ acc ++ tree.map(i ⇒ q"$i.interfaceType") - case (acc, _) ⇒ acc + case (acc, MacroInterfaces(tree)) => acc ++ tree.map(i => q"$i.interfaceType") + case (acc, _) => acc } q""" - ${ctxValType.fold(q"")(cv ⇒ q"val valFn = ${cv._2}")} + ${ctxValType.fold(q"")(cv => q"val valFn = ${cv._2}")} sangria.schema.ObjectType.createFromMacro( ${configName orElse annotationName getOrElse tpeName}, ${configDesc orElse annotationDesc}, $interfaces, - () ⇒ ${fields map c.untypecheck}) + () => ${fields map c.untypecheck}) """ } } @@ -67,18 +67,18 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { private def collectFields(config: Seq[MacroDeriveObjectSetting], ctxType: Type, targetType: Type, valType: Type, useFn: Boolean): Either[List[(Position, String)], List[Tree]] = { val knownMembers = findKnownMembers(targetType, config.foldLeft(Set.empty[String]) { - case (acc, MacroIncludeMethods(methods)) ⇒ acc ++ methods - case (acc, _) ⇒ acc + case (acc, MacroIncludeMethods(methods)) => acc ++ methods + case (acc, _) => acc }) validateFieldConfig(knownMembers, config) match { - case Nil ⇒ + case Nil => val fields = extractFields(knownMembers, config) - val classFields = fields map { field ⇒ + val classFields = fields map { field => val (args, resolve) = if (field.accessor) - Nil → q"(c: sangria.schema.Context[$ctxType, $valType]) ⇒ ${if (useFn) q"valFn(c.ctx)" else q"c.value"}.${field.method.name}" + Nil -> q"(c: sangria.schema.Context[$ctxType, $valType]) => ${if (useFn) q"valFn(c.ctx)" else q"c.value"}.${field.method.name}" else fieldWithArguments(config, field, ctxType, valType, useFn) @@ -87,29 +87,29 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { val name = field.name val annotationName = symbolName(field.annotations) - val configName = config.collect{case MacroRenameField(`name`, tree, _) ⇒ tree}.lastOption + val configName = config.collect{case 
MacroRenameField(`name`, tree, _) => tree}.lastOption val annotationDescr = symbolDescription(field.annotations) - val configDescr = config.collect{case MacroDocumentField(`name`, tree, _, _) ⇒ tree}.lastOption + val configDescr = config.collect{case MacroDocumentField(`name`, tree, _, _) => tree}.lastOption val annotationDepr = symbolDeprecation(field.annotations) - val configDocDepr = config.collect{case MacroDocumentField(`name`, _, reason, _) ⇒ reason}.lastOption getOrElse q"None" - val configDepr = config.collect{case MacroDeprecateField(`name`, reason, _) ⇒ reason}.lastOption getOrElse q"None" + val configDocDepr = config.collect{case MacroDocumentField(`name`, _, reason, _) => reason}.lastOption getOrElse q"None" + val configDepr = config.collect{case MacroDeprecateField(`name`, reason, _) => reason}.lastOption getOrElse q"None" - val complexity = config.collect{case MacroFieldComplexity(`name`, c, _) ⇒ c}.lastOption + val complexity = config.collect{case MacroFieldComplexity(`name`, c, _) => c}.lastOption val annotationTags = symbolFieldTags(field.annotations) val configTags = config.foldLeft(q"List[sangria.execution.FieldTag]()") { - case (acc, MacroFieldTags(`name`, tree, _)) ⇒ q"$acc ++ ${tree.toList}" - case (acc, _) ⇒ acc + case (acc, MacroFieldTags(`name`, tree, _)) => q"$acc ++ ${tree.toList}" + case (acc, _) => acc } val fieldName: c.universe.Tree = { val nonTransformedName = configName orElse annotationName getOrElse q"$name" - config.collect{case MacroTransformFieldNames(fnt) ⇒ fnt}.lastOption match { - case Some(fnt) ⇒ q"$fnt($nonTransformedName)" - case None ⇒ nonTransformedName + config.collect{case MacroTransformFieldNames(fnt) => fnt}.lastOption match { + case Some(fnt) => q"$fnt($nonTransformedName)" + case None => nonTransformedName } } @@ -130,8 +130,8 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { val allFields = classFields ++ additionalFields(config) if (allFields.nonEmpty) Right(allFields) - else 
Left(List(c.enclosingPosition → s"$targetType: Field list is empty")) - case errors ⇒ Left(errors) + else Left(List(c.enclosingPosition -> s"$targetType: Field list is empty")) + case errors => Left(errors) } } @@ -153,31 +153,31 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { private def fieldWithArguments(config: Seq[MacroDeriveObjectSetting], member: KnownMember, ctxType: Type, valType: Type, useFn: Boolean) = { val args = member.method.paramLists.map(_ map createArg(config, member)) val argsAst = args map (_ map { - case NormalArg(name, tpe, _, false) ⇒ q"c.arg[$tpe]($name)" - case NormalArg(name, tpe, _, true) ⇒ q"c.argOpt[$tpe]($name)" - case ContextArg ⇒ q"c" + case NormalArg(name, tpe, _, false) => q"c.arg[$tpe]($name)" + case NormalArg(name, tpe, _, true) => q"c.argOpt[$tpe]($name)" + case ContextArg => q"c" }) - args.flatten.collect{case na: NormalArg ⇒ na.tree} → - q"(c: sangria.schema.Context[$ctxType, $valType]) ⇒ ${if (useFn) q"valFn(c.ctx)" else q"c.value"}.${member.method.name}(...$argsAst)" + args.flatten.collect{case na: NormalArg => na.tree} -> + q"(c: sangria.schema.Context[$ctxType, $valType]) => ${if (useFn) q"valFn(c.ctx)" else q"c.value"}.${member.method.name}(...$argsAst)" } private def createArg(config: Seq[MacroDeriveObjectSetting], member: KnownMember)(arg: Symbol) = arg match { - case term: TermSymbol if term.typeSignature.resultType.erasure =:= typeOf[Context[_, _]].erasure ⇒ + case term: TermSymbol if term.typeSignature.resultType.erasure =:= typeOf[Context[_, _]].erasure => ContextArg - case term: TermSymbol ⇒ + case term: TermSymbol => val tpe = term.typeSignature.resultType val methodName = member.method.name.decodedName.toString val argName = term.name.decodedName.toString val name = collectArgRename(config, methodName, argName) orElse - symbolName(term.annotations).collect {case q"${s: String}" ⇒ s} getOrElse argName + symbolName(term.annotations).collect {case q"${s: String}" => s} getOrElse argName val 
description = collectArgDescription(config, methodName, argName) orElse symbolDescription(term.annotations) val default = collectArgDefault(config, methodName, argName) orElse symbolDefault(term.annotations) val ast = default match { - case Some(defaultValue) ⇒ + case Some(defaultValue) => q""" sangria.schema.Argument.createWithDefault( $name, @@ -185,7 +185,7 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { $description, $defaultValue) """ - case None ⇒ + case None => q""" sangria.schema.Argument.createWithoutDefault( $name, @@ -209,11 +209,11 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { // we "force" m by calling info. This makes sure its type information is complete, in particular that // annotations are available through `.annotations` tpe.members.map { m => m.info; m }.collect { - case m: MethodSymbol if m.isCaseAccessor ⇒ + case m: MethodSymbol if m.isCaseAccessor => KnownMember(tpe, m, findCaseClassAccessorAnnotations(tpe, m), accessor = true) - case m: MethodSymbol if memberField(m.annotations) || includeMethods.contains(m.name.decodedName.toString) ⇒ + case m: MethodSymbol if memberField(m.annotations) || includeMethods.contains(m.name.decodedName.toString) => KnownMember(tpe, m, m.annotations, accessor = false) - case value: TermSymbol if value.isVal && (memberField(value.annotations) || includeMethods.contains(value.name.decodedName.toString)) ⇒ + case value: TermSymbol if value.isVal && (memberField(value.annotations) || includeMethods.contains(value.name.decodedName.toString)) => KnownMember(tpe, value.getter.asMethod, value.annotations, accessor = false) }.toList.reverse @@ -222,9 +222,9 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { else { val annotationsConstructors = for { - c ← tpe.members.filter(_.isConstructor) - pl ← c.asMethod.paramLists - p ← pl + c <- tpe.members.filter(_.isConstructor) + pl <- c.asMethod.paramLists + p <- pl if p.name.decodedName.toString == 
member.name.decodedName.toString } yield p.annotations @@ -233,177 +233,177 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { private def extractFields(knownMembers: List[KnownMember], config: Seq[MacroDeriveObjectSetting]) = { val included = config.foldLeft(Set.empty[String]){ - case (acc, MacroIncludeFields(fields, _)) ⇒ acc ++ fields - case (acc, _) ⇒ acc + case (acc, MacroIncludeFields(fields, _)) => acc ++ fields + case (acc, _) => acc } val excluded = config.foldLeft(Set.empty[String]){ - case (acc, MacroExcludeFields(fields, _)) ⇒ acc ++ fields - case (acc, MacroReplaceField(fieldName, _, _)) ⇒ acc + fieldName - case (acc, _) ⇒ acc + case (acc, MacroExcludeFields(fields, _)) => acc ++ fields + case (acc, MacroReplaceField(fieldName, _, _)) => acc + fieldName + case (acc, _) => acc } val actualIncluded = if (included.nonEmpty) included - else knownMembers.map(m ⇒ m.name).toSet + else knownMembers.map(m => m.name).toSet val actualFields = actualIncluded -- excluded - knownMembers.filter(m ⇒ actualFields.contains(m.name) && !memberExcluded(m.annotations)) + knownMembers.filter(m => actualFields.contains(m.name) && !memberExcluded(m.annotations)) } private def validateFieldConfig(knownMembers: List[KnownMember], config: Seq[MacroDeriveObjectSetting]) = { val knownMembersSet = knownMembers.map(_.name).toSet def unknownMember(pos: Position, name: String) = - pos → s"Unknown member '$name'. Known members are: ${knownMembers map (_.name) mkString ", "}" + pos -> s"Unknown member '$name'. Known members are: ${knownMembers map (_.name) mkString ", "}" def getMethod(pos: Position, name: String) = knownMembers.withFilter(_.name == name).map(_.method) match { - case method :: Nil ⇒ Right(method) - case Nil ⇒ Left(unknownMember(pos, name) :: Nil) - case _ ⇒ Left(List( - pos → s"Cannot configure overloaded method '$name' using `DeriveObjectSetting` due to ambiguity, use annotations instead." 
+ case method :: Nil => Right(method) + case Nil => Left(unknownMember(pos, name) :: Nil) + case _ => Left(List( + pos -> s"Cannot configure overloaded method '$name' using `DeriveObjectSetting` due to ambiguity, use annotations instead." )) } - def getArgument(pos: Position, methodName: String, argName: String) = getMethod(pos, methodName).right.flatMap{ method ⇒ + def getArgument(pos: Position, methodName: String, argName: String) = getMethod(pos, methodName).right.flatMap{ method => val knownArguments = method.paramLists.flatten knownArguments.find(_.name.decodedName.toString == argName) .map(Right(_)) .getOrElse(Left(List( - pos → s"Unknown argument '$argName' of method '$method'. Known arguments are: ${knownArguments.map(_.name.decodedName) mkString ", "}" + pos -> s"Unknown argument '$argName' of method '$method'. Known arguments are: ${knownArguments.map(_.name.decodedName) mkString ", "}" ))) } def validateHasArgument(pos: Position, methodName: String, argName: String) = - getArgument(pos, methodName, argName).right.map(_ ⇒ Nil).merge + getArgument(pos, methodName, argName).right.map(_ => Nil).merge config.toList.flatMap { - case MacroIncludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) ⇒ + case MacroIncludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) => val unknown = fields.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroExcludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) ⇒ + case MacroExcludeFields(fields, pos) if !fields.forall(knownMembersSet.contains) => val unknown = fields.diff(knownMembersSet) unknown.toList.map(unknownMember(pos, _)) - case MacroDocumentField(fieldName, _, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroDocumentField(fieldName, _, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroRenameField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroRenameField(fieldName, 
_, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroFieldTags(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroFieldTags(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroDeprecateField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroDeprecateField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroFieldComplexity(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroFieldComplexity(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroReplaceField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) ⇒ + case MacroReplaceField(fieldName, _, pos) if !knownMembersSet.contains(fieldName) => unknownMember(pos, fieldName) :: Nil - case MacroMethodArgumentRename(methodName, argName, _, pos) ⇒ + case MacroMethodArgumentRename(methodName, argName, _, pos) => validateHasArgument(pos, methodName, argName) - case MacroMethodArgumentDescription(methodName, argName, _, pos) ⇒ + case MacroMethodArgumentDescription(methodName, argName, _, pos) => validateHasArgument(pos, methodName, argName) - case MacroMethodArgumentsDescription(methodName, descriptions, pos) ⇒ + case MacroMethodArgumentsDescription(methodName, descriptions, pos) => descriptions.keys.toList.flatMap(validateHasArgument(pos, methodName, _)) - case MacroMethodArgumentDefault(methodName, argName, _, _, pos) ⇒ + case MacroMethodArgumentDefault(methodName, argName, _, _, pos) => validateHasArgument(pos, methodName, argName) - case MacroMethodArgument(methodName, argName, _, _, _, pos) ⇒ + case MacroMethodArgument(methodName, argName, _, _, _, pos) => validateHasArgument(pos, methodName, argName) - case _ ⇒ Nil + case _ => Nil } } private def additionalFields(config: Seq[MacroDeriveObjectSetting]) = 
config.foldLeft(List[Tree]()){ - case (acc, MacroAddFields(fields)) ⇒ acc ++ fields - case (acc, MacroReplaceField(_, field, _)) ⇒ acc :+ field - case (acc, _) ⇒ acc + case (acc, MacroAddFields(fields)) => acc ++ fields + case (acc, MacroReplaceField(_, field, _)) => acc :+ field + case (acc, _) => acc } private def validateObjectConfig(config: Seq[Tree], tpe: Type) = config.map { - case q"$setting.apply[$_, $_]($name)" if checkSetting[ObjectTypeName.type](setting) ⇒ + case q"$setting.apply[$_, $_]($name)" if checkSetting[ObjectTypeName.type](setting) => Right(MacroName(name)) - case q"$setting.apply[$_, $_]($description)" if checkSetting[ObjectTypeDescription.type](setting) ⇒ + case q"$setting.apply[$_, $_]($description)" if checkSetting[ObjectTypeDescription.type](setting) => Right(MacroDescription(description)) - case q"$setting.apply[$_, $_](..$ints)" if checkSetting[Interfaces.type](setting) ⇒ + case q"$setting.apply[$_, $_](..$ints)" if checkSetting[Interfaces.type](setting) => Right(MacroInterfaces(ints)) - case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $description, $deprecationReason)" if checkSetting[DocumentField.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $description, $deprecationReason)" if checkSetting[DocumentField.type](setting) => Right(MacroDocumentField(fieldName, description, deprecationReason, tree.pos)) - case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $graphqlName)" if checkSetting[RenameField.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $graphqlName)" if checkSetting[RenameField.type](setting) => Right(MacroRenameField(fieldName, graphqlName, tree.pos)) - case tree @ q"$setting.apply[$_, $_](${fieldName: String}, ..$fieldTags)" if checkSetting[FieldTags.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, ..$fieldTags)" if checkSetting[FieldTags.type](setting) => Right(MacroFieldTags(fieldName, fieldTags, tree.pos)) - case tree 
@ q"$setting.apply[$_, $_](${fieldName: String}, $deprecationReason)" if checkSetting[DeprecateField.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $deprecationReason)" if checkSetting[DeprecateField.type](setting) => Right(MacroDeprecateField(fieldName, q"Some($deprecationReason)", tree.pos)) - case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $complexity)" if checkSetting[FieldComplexity.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $complexity)" if checkSetting[FieldComplexity.type](setting) => Right(MacroFieldComplexity(fieldName, complexity, tree.pos)) - case tree @ q"$setting.apply[$_, $_](..${fields: List[String]})" if checkSetting[IncludeFields.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](..${fields: List[String]})" if checkSetting[IncludeFields.type](setting) => Right(MacroIncludeFields(fields.toSet, tree.pos)) - case tree @ q"$setting.apply[$_, $_](..${methods: List[String]})" if checkSetting[IncludeMethods.type](setting) ⇒ - val known = tpe.members.collect {case m: MethodSymbol ⇒ m.name.decodedName.toString}.toSet + case tree @ q"$setting.apply[$_, $_](..${methods: List[String]})" if checkSetting[IncludeMethods.type](setting) => + val known = tpe.members.collect {case m: MethodSymbol => m.name.decodedName.toString}.toSet val unknown = methods filterNot known.contains if (unknown.isEmpty) Right(MacroIncludeMethods(methods.toSet)) - else Left(tree.pos → s"Unknown members: ${unknown mkString ", "}. Known members are: ${known mkString ", "}") + else Left(tree.pos -> s"Unknown members: ${unknown mkString ", "}. 
Known members are: ${known mkString ", "}") - case tree @ q"$setting.apply[$_, $_](..${fields: List[String]})" if checkSetting[ExcludeFields.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](..${fields: List[String]})" if checkSetting[ExcludeFields.type](setting) => Right(MacroExcludeFields(fields.toSet, tree.pos)) - case q"$setting.apply[$_, $_](..$fields)" if checkSetting[AddFields.type](setting) ⇒ + case q"$setting.apply[$_, $_](..$fields)" if checkSetting[AddFields.type](setting) => Right(MacroAddFields(fields)) - case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $field)" if checkSetting[ReplaceField.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${fieldName: String}, $field)" if checkSetting[ReplaceField.type](setting) => Right(MacroReplaceField(fieldName, field, tree.pos)) - case q"$setting.apply[$_, $_]($fn)" if checkSetting[TransformFieldNames.type](setting) ⇒ + case q"$setting.apply[$_, $_]($fn)" if checkSetting[TransformFieldNames.type](setting) => Right(MacroTransformFieldNames(fn)) - case tree @ q"$setting.apply[$_, $_](${methodName: String}, ${argName: String}, ${newName: String})" if checkSetting[MethodArgumentRename.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${methodName: String}, ${argName: String}, ${newName: String})" if checkSetting[MethodArgumentRename.type](setting) => Right(MacroMethodArgumentRename(methodName, argName, newName, tree.pos)) - case tree @ q"$setting.apply[$_, $_](${methodName: String}, ${argName: String}, $description)" if checkSetting[MethodArgumentDescription.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_](${methodName: String}, ${argName: String}, $description)" if checkSetting[MethodArgumentDescription.type](setting) => Right(MacroMethodArgumentDescription(methodName, argName, description, tree.pos)) - case tree @ q"$setting.apply[$_, $_](${methodName: String}, ..$descriptions)" if checkSetting[MethodArgumentsDescription.type](setting) ⇒ + case tree @ q"$setting.apply[$_, 
$_](${methodName: String}, ..$descriptions)" if checkSetting[MethodArgumentsDescription.type](setting) => val descriptionsMap = descriptions.map{ - case q"(${argName: String}, $description)" ⇒ argName → description - case q"scala.this.Predef.ArrowAssoc[$_](${argName: String}).->[$_]($description)" ⇒ argName → description // scala 2.11 - case q"scala.this.Predef.ArrowAssoc[$_](${argName: String}).→[$_]($description)" ⇒ argName → description // scala 2.11 - case q"scala.Predef.ArrowAssoc[$_](${argName: String}).->[$_]($description)" ⇒ argName → description // scala 2.12 - case q"scala.Predef.ArrowAssoc[$_](${argName: String}).→[$_]($description)" ⇒ argName → description // scala 2.12 + case q"(${argName: String}, $description)" => argName -> description + case q"scala.this.Predef.ArrowAssoc[$_](${argName: String}).->[$_]($description)" => argName -> description // scala 2.11 + case q"scala.this.Predef.ArrowAssoc[$_](${argName: String}).→[$_]($description)" => argName -> description // scala 2.11 (deprecated arrow alias still valid in matched user code) + case q"scala.Predef.ArrowAssoc[$_](${argName: String}).->[$_]($description)" => argName -> description // scala 2.12 + case q"scala.Predef.ArrowAssoc[$_](${argName: String}).→[$_]($description)" => argName -> description // scala 2.12 (deprecated arrow alias still valid in matched user code) }.toMap Right(MacroMethodArgumentsDescription(methodName, descriptionsMap, tree.pos)) - case tree @ q"$setting.apply[$_, $_, ${arg: Type}](${methodName: String}, ${argName: String}, $default)" if checkSetting[MethodArgumentDefault.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_, ${arg: Type}](${methodName: String}, ${argName: String}, $default)" if checkSetting[MethodArgumentDefault.type](setting) => Right(MacroMethodArgumentDefault(methodName, argName, arg, default, tree.pos)) - case tree @ q"$setting.apply[$_, $_, ${arg: Type}](${methodName: String}, ${argName: String}, $description, $default)" if checkSetting[MethodArgument.type](setting) ⇒ + case tree @ q"$setting.apply[$_, $_, ${arg: Type}](${methodName: String}, ${argName: 
String}, $description, $default)" if checkSetting[MethodArgument.type](setting) => Right(MacroMethodArgument(methodName, argName, description, arg, default, tree.pos)) - case tree ⇒ Left(tree.pos → + case tree => Left(tree.pos -> "Unsupported shape of derivation config. Please define subclasses of `DeriveObjectTypeSetting` directly in the argument list of the macro.") } @@ -446,13 +446,13 @@ class DeriveObjectTypeMacro(context: blackbox.Context) extends { }.lastOption private def collectArgDescription(config: Seq[MacroDeriveObjectSetting], methodName: String, argName: String) = config.collect{ - case MacroMethodArgumentDescription(`methodName`, `argName`, description, _) ⇒ Some(description) - case MacroMethodArgumentsDescription(`methodName`, descriptions, _) ⇒ descriptions.get(argName) - case MacroMethodArgument(`methodName`, `argName`, description, _, _, _) ⇒ Some(description) + case MacroMethodArgumentDescription(`methodName`, `argName`, description, _) => Some(description) + case MacroMethodArgumentsDescription(`methodName`, descriptions, _) => descriptions.get(argName) + case MacroMethodArgument(`methodName`, `argName`, description, _, _, _) => Some(description) }.flatten.lastOption private def collectArgDefault(config: Seq[MacroDeriveObjectSetting], methodName: String, argName: String) = config.collect{ - case MacroMethodArgumentDefault(`methodName`, `argName`, _, default, _) ⇒ default - case MacroMethodArgument(`methodName`, `argName`, _, _, default, _) ⇒ default + case MacroMethodArgumentDefault(`methodName`, `argName`, _, default, _) => default + case MacroMethodArgument(`methodName`, `argName`, _, _, default, _) => default }.lastOption -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/macros/derive/package.scala b/src/main/scala/sangria/macros/derive/package.scala index e1966ecb..0ffc1b82 100644 --- a/src/main/scala/sangria/macros/derive/package.scala +++ b/src/main/scala/sangria/macros/derive/package.scala @@ -1,12 +1,12 @@ package 
sangria.macros -import scala.language.experimental.{macros ⇒ `scalac, please just let me do it!`} +import scala.language.experimental.{macros => `scalac, please just let me do it!`} import sangria.schema.{InputObjectType, EnumType, ObjectType} package object derive { - def deriveContextObjectType[Ctx, CtxVal, Val](fn: Ctx ⇒ CtxVal, config: DeriveObjectSetting[Ctx, Val]*): ObjectType[Ctx, Val] = + def deriveContextObjectType[Ctx, CtxVal, Val](fn: Ctx => CtxVal, config: DeriveObjectSetting[Ctx, Val]*): ObjectType[Ctx, Val] = macro DeriveObjectTypeMacro.deriveContextObjectType[Ctx, CtxVal, Val] def deriveObjectType[Ctx, Val](config: DeriveObjectSetting[Ctx, Val]*): ObjectType[Ctx, Val] = diff --git a/src/main/scala/sangria/macros/package.scala b/src/main/scala/sangria/macros/package.scala index 7ae3a970..11d08d74 100644 --- a/src/main/scala/sangria/macros/package.scala +++ b/src/main/scala/sangria/macros/package.scala @@ -1,6 +1,6 @@ package sangria -import scala.language.experimental.{macros ⇒ `scalac, please just let me do it!`} +import scala.language.experimental.{macros => `scalac, please just let me do it!`} import sangria.ast.{Document, InputDocument, Value} package object macros { diff --git a/src/main/scala/sangria/marshalling/queryAst.scala b/src/main/scala/sangria/marshalling/queryAst.scala index 8ee3a432..c93c8a31 100644 --- a/src/main/scala/sangria/marshalling/queryAst.scala +++ b/src/main/scala/sangria/marshalling/queryAst.scala @@ -50,14 +50,14 @@ class QueryAstInputUnmarshaller extends InputUnmarshaller[ast.Value] { def isScalarNode(node: ast.Value) = node.isInstanceOf[ast.ScalarValue] def getScalarValue(node: ast.Value) = node def getScalaScalarValue(node: ast.Value) = node match { - case ast.BooleanValue(b, _, _) ⇒ b - case ast.BigIntValue(i, _, _) ⇒ i - case ast.BigDecimalValue(d, _, _) ⇒ d - case ast.FloatValue(f, _, _) ⇒ f - case ast.IntValue(i, _, _) ⇒ i - case ast.StringValue(s, _, _, _, _) ⇒ s - case ast.EnumValue(s, _, _) ⇒ s - case node ⇒ 
throw new IllegalStateException("Unsupported scalar node: " + node) + case ast.BooleanValue(b, _, _) => b + case ast.BigIntValue(i, _, _) => i + case ast.BigDecimalValue(d, _, _) => d + case ast.FloatValue(f, _, _) => f + case ast.IntValue(i, _, _) => i + case ast.StringValue(s, _, _, _, _) => s + case ast.EnumValue(s, _, _) => s + case node => throw new IllegalStateException("Unsupported scalar node: " + node) } def isVariableNode(node: ast.Value) = node.isInstanceOf[ast.VariableValue] @@ -74,31 +74,31 @@ class QueryAstResultMarshaller extends ResultMarshaller { def addMapNodeElem(builder: MapBuilder, key: String, value: Node, optional: Boolean) = builder.add(key, value) def scalarNode(value: Any, typeName: String, info: Set[ScalarValueInfo]) = value match { - case v: String ⇒ ast.StringValue(v) - case v: Boolean ⇒ ast.BooleanValue(v) - case v: Int ⇒ ast.IntValue(v) - case v: Long ⇒ ast.BigIntValue(v) - case v: Float ⇒ ast.FloatValue(v) - case v: Double ⇒ ast.FloatValue(v) - case v: BigInt ⇒ ast.BigIntValue(v) - case v: BigDecimal ⇒ ast.BigDecimalValue(v) - case v ⇒ throw new IllegalArgumentException("Unsupported scalar value: " + v) + case v: String => ast.StringValue(v) + case v: Boolean => ast.BooleanValue(v) + case v: Int => ast.IntValue(v) + case v: Long => ast.BigIntValue(v) + case v: Float => ast.FloatValue(v) + case v: Double => ast.FloatValue(v) + case v: BigInt => ast.BigIntValue(v) + case v: BigDecimal => ast.BigDecimalValue(v) + case v => throw new IllegalArgumentException("Unsupported scalar value: " + v) } def enumNode(value: String, typeName: String) = ast.EnumValue(value) def arrayNode(values: Vector[Node]) = ast.ListValue(values.toVector) def optionalArrayNodeValue(value: Option[Node]) = value match { - case Some(v) ⇒ v - case None ⇒ nullNode + case Some(v) => v + case None => nullNode } def mapNode(builder: MapBuilder) = mapNode(builder.toList) def mapNode(keyValues: Seq[(String, Node)]) = - ast.ObjectValue(keyValues.toVector.map{case (k, v) ⇒ 
ast.ObjectField(k, v)}) + ast.ObjectValue(keyValues.toVector.map{case (k, v) => ast.ObjectField(k, v)}) def nullNode = ast.NullValue() def renderCompact(node: Node) = QueryRenderer.render(node, QueryRenderer.Compact) def renderPretty(node: Node) = QueryRenderer.render(node, QueryRenderer.Pretty) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/parser/PositionTracking.scala b/src/main/scala/sangria/parser/PositionTracking.scala index 618d86d4..e51ab827 100644 --- a/src/main/scala/sangria/parser/PositionTracking.scala +++ b/src/main/scala/sangria/parser/PositionTracking.scala @@ -5,7 +5,7 @@ import sangria.ast.AstLocation import scala.annotation.tailrec -trait PositionTracking { this: Parser ⇒ +trait PositionTracking { this: Parser => private var lineIdx = Array(0) def parseLocations: Boolean @@ -56,4 +56,4 @@ trait PositionTracking { this: Parser ⇒ go(arr.length - 1, 0) } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/parser/QueryParser.scala b/src/main/scala/sangria/parser/QueryParser.scala index c8e9cada..5bec6f8b 100644 --- a/src/main/scala/sangria/parser/QueryParser.scala +++ b/src/main/scala/sangria/parser/QueryParser.scala @@ -9,7 +9,7 @@ import sangria.util.StringUtil import scala.util.{Failure, Success} -trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ignored ⇒ +trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ignored => def Token = rule { Punctuator | Name | NumberValue | StringValue } @@ -28,8 +28,8 @@ trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ig def Name = rule { Ignored.* ~ NameStrict } def NumberValue = rule { atomic(Comments ~ trackPos ~ IntegerValuePart ~ FloatValuePart.? 
~ IgnoredNoComment.*) ~> - ((comment, location, intPart, floatPart) ⇒ - floatPart map (f ⇒ ast.BigDecimalValue(BigDecimal(intPart + f), comment, location)) getOrElse + ((comment, location, intPart, floatPart) => + floatPart map (f => ast.BigDecimalValue(BigDecimal(intPart + f), comment, location)) getOrElse ast.BigIntValue(BigInt(intPart), comment, location)) } def FloatValuePart = rule { atomic(capture(FractionalPart ~ ExponentPart.? | ExponentPart)) } @@ -56,7 +56,7 @@ trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ig def BlockStringValue = rule { Comments ~ trackPos ~ BlockString ~ clearSB() ~ BlockStringCharacters ~ BlockString ~ push(sb.toString) ~ IgnoredNoComment.* ~> - ((comment, location, s) ⇒ ast.StringValue(StringUtil.blockStringValue(s), true, Some(s), comment, location)) + ((comment, location, s) => ast.StringValue(StringUtil.blockStringValue(s), true, Some(s), comment, location)) } def BlockStringCharacters = rule { (BlockStringCharacter | BlockStringEscapedChar).* } @@ -75,7 +75,7 @@ trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ig def NonBlockStringValue = rule { Comments ~ trackPos ~ '"' ~ clearSB() ~ Characters ~ '"' ~ push(sb.toString) ~ IgnoredNoComment.* ~> - ((comment, location, s) ⇒ ast.StringValue(s, false, None, comment, location)) + ((comment, location, s) => ast.StringValue(s, false, None, comment, location)) } def Characters = rule { (NormalCharacter | '\\' ~ EscapedChar).* } @@ -89,7 +89,7 @@ trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ig 'n' ~ appendSB('\n') | 'r' ~ appendSB('\r') | 't' ~ appendSB('\t') | - Unicode ~> { code ⇒ sb.append(code.asInstanceOf[Char]); () } + Unicode ~> { code => sb.append(code.asInstanceOf[Char]); () } } def Unicode = rule { 'u' ~ capture(4 times HexDigit) ~> (Integer.parseInt(_, 16)) } @@ -97,7 +97,7 @@ trait Tokens extends StringBuilding with PositionTracking { this: Parser with Ig def Keyword(s: String) = rule 
{ atomic(Ignored.* ~ s ~ !NameChar ~ IgnoredNoComment.*) } } -trait Ignored extends PositionTracking { this: Parser ⇒ +trait Ignored extends PositionTracking { this: Parser => def parseComments: Boolean val WhiteSpace = CharPredicate("\u0009\u0020") @@ -114,7 +114,7 @@ trait Ignored extends PositionTracking { this: Parser ⇒ def Comments = rule { test(parseComments) ~ CommentCap.* ~ Ignored.* ~> (_.toVector) | CommentNoCap.* ~ Ignored.* ~ push(Vector.empty) } - def CommentCap = rule { trackPos ~ "#" ~ capture(CommentChar.*) ~ IgnoredNoComment.* ~> ((location, comment) ⇒ ast.Comment(comment, location)) } + def CommentCap = rule { trackPos ~ "#" ~ capture(CommentChar.*) ~ IgnoredNoComment.* ~> ((location, comment) => ast.Comment(comment, location)) } def CommentNoCap: Rule0 = rule { "#" ~ CommentChar.* ~ IgnoredNoComment.* } @@ -132,21 +132,21 @@ trait Ignored extends PositionTracking { this: Parser ⇒ } -trait Document { this: Parser with Operations with Ignored with Fragments with Operations with Values with TypeSystemDefinitions ⇒ +trait Document { this: Parser with Operations with Ignored with Fragments with Operations with Values with TypeSystemDefinitions => def Document = rule { IgnoredNoComment.* ~ trackPos ~ Definition.+ ~ IgnoredNoComment.* ~ Comments ~ EOI ~> - ((location, d, comments) ⇒ ast.Document(d.toVector, comments, location)) + ((location, d, comments) => ast.Document(d.toVector, comments, location)) } def InputDocument = rule { IgnoredNoComment.* ~ trackPos ~ ValueConst.+ ~ IgnoredNoComment.* ~ Comments ~ EOI ~> - ((location, vs, comments) ⇒ ast.InputDocument(vs.toVector, comments, location)) + ((location, vs, comments) => ast.InputDocument(vs.toVector, comments, location)) } def InputDocumentWithVariables = rule { IgnoredNoComment.* ~ trackPos ~ Value.+ ~ IgnoredNoComment.* ~ Comments ~ EOI ~> - ((location, vs, comments) ⇒ ast.InputDocument(vs.toVector, comments, location)) + ((location, vs, comments) => ast.InputDocument(vs.toVector, comments, 
location)) } def Definition = rule { @@ -162,7 +162,7 @@ trait Document { this: Parser with Operations with Ignored with Fragments with O } -trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directives with Types with Operations with Values with Fragments ⇒ +trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directives with Types with Operations with Values with Fragments => def legacyImplementsInterface: Boolean def legacyEmptyFields: Boolean @@ -194,12 +194,12 @@ trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directi def ScalarTypeDefinition = rule { Description ~ Comments ~ trackPos ~ scalar ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~> ( - (descr, comment, location, name, dirs) ⇒ ast.ScalarTypeDefinition(name, dirs, descr, comment, location)) + (descr, comment, location, name, dirs) => ast.ScalarTypeDefinition(name, dirs, descr, comment, location)) } def ObjectTypeDefinition = rule { Description ~ Comments ~ trackPos ~ `type` ~ Name ~ (ImplementsInterfaces.? ~> (_ getOrElse Vector.empty)) ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ FieldsDefinition.? ~> ( - (descr, comment, location, name, interfaces, dirs, fields) ⇒ ast.ObjectTypeDefinition(name, interfaces, fields.fold(Vector.empty[ast.FieldDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) + (descr, comment, location, name, interfaces, dirs, fields) => ast.ObjectTypeDefinition(name, interfaces, fields.fold(Vector.empty[ast.FieldDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) } def TypeSystemExtension = rule { @@ -218,51 +218,51 @@ trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directi def SchemaExtension = rule { (Comments ~ trackPos ~ extend ~ schema ~ (DirectivesConst.? 
~> (_ getOrElse Vector.empty)) ~ wsNoComment('{') ~ OperationTypeDefinition.+ ~ Comments ~ wsNoComment('}') ~> ( - (comment, location, dirs, ops, tc) ⇒ ast.SchemaExtensionDefinition(ops.toVector, dirs, comment, tc, location))) | + (comment, location, dirs, ops, tc) => ast.SchemaExtensionDefinition(ops.toVector, dirs, comment, tc, location))) | (Comments ~ trackPos ~ extend ~ schema ~ DirectivesConst ~> ( - (comment, location, dirs) ⇒ ast.SchemaExtensionDefinition(Vector.empty, dirs, comment, Vector.empty, location))) + (comment, location, dirs) => ast.SchemaExtensionDefinition(Vector.empty, dirs, comment, Vector.empty, location))) } def ObjectTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ `type` ~ Name ~ (ImplementsInterfaces.? ~> (_ getOrElse Vector.empty)) ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ FieldsDefinition ~> ( - (comment, location, name, interfaces, dirs, fields) ⇒ ast.ObjectTypeExtensionDefinition(name, interfaces, fields._1.toVector, dirs, comment, fields._2, location))) | + (comment, location, name, interfaces, dirs, fields) => ast.ObjectTypeExtensionDefinition(name, interfaces, fields._1.toVector, dirs, comment, fields._2, location))) | (Comments ~ trackPos ~ extend ~ `type` ~ Name ~ (ImplementsInterfaces.? 
~> (_ getOrElse Vector.empty)) ~ DirectivesConst ~> ( - (comment, location, name, interfaces, dirs) ⇒ ast.ObjectTypeExtensionDefinition(name, interfaces, Vector.empty, dirs, comment, Vector.empty, location))) | + (comment, location, name, interfaces, dirs) => ast.ObjectTypeExtensionDefinition(name, interfaces, Vector.empty, dirs, comment, Vector.empty, location))) | (Comments ~ trackPos ~ extend ~ `type` ~ Name ~ ImplementsInterfaces ~> ( - (comment, location, name, interfaces) ⇒ ast.ObjectTypeExtensionDefinition(name, interfaces, Vector.empty, Vector.empty, comment, Vector.empty, location))) + (comment, location, name, interfaces) => ast.ObjectTypeExtensionDefinition(name, interfaces, Vector.empty, Vector.empty, comment, Vector.empty, location))) } def InterfaceTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ interface ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ FieldsDefinition ~> ( - (comment, location, name, dirs, fields) ⇒ ast.InterfaceTypeExtensionDefinition(name, fields._1.toVector, dirs, comment, fields._2, location))) | + (comment, location, name, dirs, fields) => ast.InterfaceTypeExtensionDefinition(name, fields._1.toVector, dirs, comment, fields._2, location))) | (Comments ~ trackPos ~ extend ~ interface ~ Name ~ DirectivesConst ~> ( - (comment, location, name, dirs) ⇒ ast.InterfaceTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) + (comment, location, name, dirs) => ast.InterfaceTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) } def UnionTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ union ~ Name ~ (DirectivesConst.? 
~> (_ getOrElse Vector.empty)) ~ UnionMemberTypes ~> ( - (comment, location, name, dirs, types) ⇒ ast.UnionTypeExtensionDefinition(name, types, dirs, comment, location))) | + (comment, location, name, dirs, types) => ast.UnionTypeExtensionDefinition(name, types, dirs, comment, location))) | (Comments ~ trackPos ~ extend ~ union ~ Name ~ DirectivesConst ~> ( - (comment, location, name, dirs) ⇒ ast.UnionTypeExtensionDefinition(name, Vector.empty, dirs, comment, location))) + (comment, location, name, dirs) => ast.UnionTypeExtensionDefinition(name, Vector.empty, dirs, comment, location))) } def EnumTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ enum ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ EnumValuesDefinition ~> ( - (comment, location, name, dirs, values) ⇒ ast.EnumTypeExtensionDefinition(name, values._1.toVector, dirs, comment, values._2, location))) | + (comment, location, name, dirs, values) => ast.EnumTypeExtensionDefinition(name, values._1.toVector, dirs, comment, values._2, location))) | (Comments ~ trackPos ~ extend ~ enum ~ Name ~ DirectivesConst ~> ( - (comment, location, name, dirs) ⇒ ast.EnumTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) + (comment, location, name, dirs) => ast.EnumTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) } def InputObjectTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ inputType ~ Name ~ (DirectivesConst.? 
~> (_ getOrElse Vector.empty)) ~ InputFieldsDefinition ~> ( - (comment, location, name, dirs, fields) ⇒ ast.InputObjectTypeExtensionDefinition(name, fields._1.toVector, dirs, comment, fields._2, location))) | + (comment, location, name, dirs, fields) => ast.InputObjectTypeExtensionDefinition(name, fields._1.toVector, dirs, comment, fields._2, location))) | (Comments ~ trackPos ~ extend ~ inputType ~ Name ~ DirectivesConst ~> ( - (comment, location, name, dirs) ⇒ ast.InputObjectTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) + (comment, location, name, dirs) => ast.InputObjectTypeExtensionDefinition(name, Vector.empty, dirs, comment, Vector.empty, location))) } def ScalarTypeExtensionDefinition = rule { (Comments ~ trackPos ~ extend ~ scalar ~ Name ~ DirectivesConst ~> ( - (comment, location, name, dirs) ⇒ ast.ScalarTypeExtensionDefinition(name, dirs, comment, location))) + (comment, location, name, dirs) => ast.ScalarTypeExtensionDefinition(name, dirs, comment, location))) } def ImplementsInterfaces = rule { @@ -271,29 +271,29 @@ trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directi } def FieldsDefinition = rule { - wsNoComment('{') ~ (test(legacyEmptyFields) ~ FieldDefinition.* | FieldDefinition.+) ~ Comments ~ wsNoComment('}') ~> (_ → _) + wsNoComment('{') ~ (test(legacyEmptyFields) ~ FieldDefinition.* | FieldDefinition.+) ~ Comments ~ wsNoComment('}') ~> (_ -> _) } def FieldDefinition = rule { Description ~ Comments ~ trackPos ~ Name ~ (ArgumentsDefinition.? ~> (_ getOrElse Vector.empty)) ~ ws(':') ~ Type ~ (Directives.? 
~> (_ getOrElse Vector.empty)) ~> ( - (descr, comment, location, name, args, fieldType, dirs) ⇒ ast.FieldDefinition(name, fieldType, args, dirs, descr, comment, location)) + (descr, comment, location, name, args, fieldType, dirs) => ast.FieldDefinition(name, fieldType, args, dirs, descr, comment, location)) } def ArgumentsDefinition = rule { wsNoComment('(') ~ InputValueDefinition.+ ~ wsNoComment(')') ~> (_.toVector) } def InputValueDefinition = rule { Description ~ Comments ~ trackPos ~ Name ~ ws(':') ~ Type ~ DefaultValue.? ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~> ( - (descr, comment, location, name, valueType, default, dirs) ⇒ ast.InputValueDefinition(name, valueType, default, dirs, descr, comment, location)) + (descr, comment, location, name, valueType, default, dirs) => ast.InputValueDefinition(name, valueType, default, dirs, descr, comment, location)) } def InterfaceTypeDefinition = rule { Description ~ Comments ~ trackPos ~ interface ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ FieldsDefinition.? ~> ( - (descr, comment, location, name, dirs, fields) ⇒ ast.InterfaceTypeDefinition(name, fields.fold(Vector.empty[ast.FieldDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) + (descr, comment, location, name, dirs, fields) => ast.InterfaceTypeDefinition(name, fields.fold(Vector.empty[ast.FieldDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) } def UnionTypeDefinition = rule { Description ~ Comments ~ trackPos ~ union ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ (UnionMemberTypes.? 
~> (_ getOrElse Vector.empty)) ~> ( - (descr, comment, location, name, dirs, members) ⇒ ast.UnionTypeDefinition(name, members, dirs, descr, comment, location)) + (descr, comment, location, name, dirs, members) => ast.UnionTypeDefinition(name, members, dirs, descr, comment, location)) } def UnionMemberTypes = rule { wsNoComment('=') ~ UnionMembers } @@ -302,32 +302,32 @@ trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directi def EnumTypeDefinition = rule { Description ~ Comments ~ trackPos ~ enum ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ EnumValuesDefinition.? ~> ( - (descr, comment, location, name, dirs, values) ⇒ ast.EnumTypeDefinition(name, values.fold(Vector.empty[ast.EnumValueDefinition])(_._1.toVector), dirs, descr, comment, values.fold(Vector.empty[ast.Comment])(_._2), location)) + (descr, comment, location, name, dirs, values) => ast.EnumTypeDefinition(name, values.fold(Vector.empty[ast.EnumValueDefinition])(_._1.toVector), dirs, descr, comment, values.fold(Vector.empty[ast.Comment])(_._2), location)) } - def EnumValuesDefinition = rule { wsNoComment('{') ~ EnumValueDefinition.+ ~ Comments ~ wsNoComment('}') ~> (_ → _) } + def EnumValuesDefinition = rule { wsNoComment('{') ~ EnumValueDefinition.+ ~ Comments ~ wsNoComment('}') ~> (_ -> _) } def EnumValueDefinition = rule { - Description ~ Comments ~ trackPos ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~> ((descr, comments, location, name, dirs) ⇒ ast.EnumValueDefinition(name, dirs, descr, comments, location)) + Description ~ Comments ~ trackPos ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~> ((descr, comments, location, name, dirs) => ast.EnumValueDefinition(name, dirs, descr, comments, location)) } def InputObjectTypeDefinition = rule { Description ~ Comments ~ trackPos ~ inputType ~ Name ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~ InputFieldsDefinition.? 
~> ( - (descr, comment, location, name, dirs, fields) ⇒ ast.InputObjectTypeDefinition(name, fields.fold(Vector.empty[ast.InputValueDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) + (descr, comment, location, name, dirs, fields) => ast.InputObjectTypeDefinition(name, fields.fold(Vector.empty[ast.InputValueDefinition])(_._1.toVector), dirs, descr, comment, fields.fold(Vector.empty[ast.Comment])(_._2), location)) } def InputFieldsDefinition = rule { - wsNoComment('{') ~ (test(legacyEmptyFields) ~ InputValueDefinition.* | InputValueDefinition.+) ~ Comments ~ wsNoComment('}') ~> (_ → _) + wsNoComment('{') ~ (test(legacyEmptyFields) ~ InputValueDefinition.* | InputValueDefinition.+) ~ Comments ~ wsNoComment('}') ~> (_ -> _) } def DirectiveDefinition = rule { Description ~ Comments ~ trackPos ~ directive ~ '@' ~ NameStrict ~ (ArgumentsDefinition.? ~> (_ getOrElse Vector.empty)) ~ on ~ DirectiveLocations ~> ( - (descr, comment, location, name, args, locations) ⇒ ast.DirectiveDefinition(name, args, locations, descr, comment, location)) + (descr, comment, location, name, args, locations) => ast.DirectiveDefinition(name, args, locations, descr, comment, location)) } def DirectiveLocations = rule { ws('|').? ~ DirectiveLocation.+(wsNoComment('|')) ~> (_.toVector) } - def DirectiveLocation = rule { Comments ~ trackPos ~ DirectiveLocationName ~> ((comment, location, name) ⇒ ast.DirectiveLocation(name, comment, location)) } + def DirectiveLocation = rule { Comments ~ trackPos ~ DirectiveLocationName ~> ((comment, location, name) => ast.DirectiveLocation(name, comment, location)) } def DirectiveLocationName = rule { TypeSystemDirectiveLocation | ExecutableDirectiveLocation @@ -360,24 +360,24 @@ trait TypeSystemDefinitions { this: Parser with Tokens with Ignored with Directi def SchemaDefinition = rule { Description ~ Comments ~ trackPos ~ schema ~ (DirectivesConst.? 
~> (_ getOrElse Vector.empty)) ~ wsNoComment('{') ~ OperationTypeDefinition.+ ~ Comments ~ wsNoComment('}') ~> ( - (descr, comment, location, dirs, ops, tc) ⇒ ast.SchemaDefinition(ops.toVector, dirs, descr, comment, tc, location)) + (descr, comment, location, dirs, ops, tc) => ast.SchemaDefinition(ops.toVector, dirs, descr, comment, tc, location)) } def OperationTypeDefinition = rule { Comments ~ trackPos ~ OperationType ~ ws(':') ~ NamedType ~> ( - (comment, location, opType, tpe) ⇒ ast.OperationTypeDefinition(opType, tpe, comment, location)) + (comment, location, opType, tpe) => ast.OperationTypeDefinition(opType, tpe, comment, location)) } def Description = rule { StringValue.? } } -trait Operations extends PositionTracking { this: Parser with Tokens with Ignored with Fragments with Values with Types with Directives ⇒ +trait Operations extends PositionTracking { this: Parser with Tokens with Ignored with Fragments with Values with Types with Directives => def OperationDefinition = rule { - Comments ~ trackPos ~ SelectionSet ~> ((comment, location, s) ⇒ ast.OperationDefinition(selections = s._1, comments = comment, trailingComments = s._2, location = location)) | + Comments ~ trackPos ~ SelectionSet ~> ((comment, location, s) => ast.OperationDefinition(selections = s._1, comments = comment, trailingComments = s._2, location = location)) | Comments ~ trackPos ~ OperationType ~ OperationName.? ~ (VariableDefinitions.? ~> (_ getOrElse Vector.empty)) ~ (Directives.? 
~> (_ getOrElse Vector.empty)) ~ SelectionSet ~> - ((comment, location, opType, name, vars, dirs, sels) ⇒ ast.OperationDefinition(opType, name, vars, dirs, sels._1, comment, sels._2, location)) + ((comment, location, opType, name, vars, dirs, sels) => ast.OperationDefinition(opType, name, vars, dirs, sels._1, comment, sels._2, location)) } def OperationName = rule { Name } @@ -397,7 +397,7 @@ trait Operations extends PositionTracking { this: Parser with Tokens with Ignore def VariableDefinitions = rule { wsNoComment('(') ~ VariableDefinition.+ ~ wsNoComment(')') ~> (_.toVector)} def VariableDefinition = rule { Comments ~ trackPos ~ Variable ~ ws(':') ~ Type ~ DefaultValue.? ~ (DirectivesConst.? ~> (_ getOrElse Vector.empty)) ~> - ((comment, location, name, tpe, defaultValue, dirs) ⇒ ast.VariableDefinition(name, tpe, defaultValue, dirs, comment, location)) } + ((comment, location, name, tpe, defaultValue, dirs) => ast.VariableDefinition(name, tpe, defaultValue, dirs, comment, location)) } def Variable = rule { Ignored.* ~ '$' ~ NameStrict } @@ -405,7 +405,7 @@ trait Operations extends PositionTracking { this: Parser with Tokens with Ignore def SelectionSet: Rule1[(Vector[ast.Selection], Vector[ast.Comment])] = rule { wsNoComment('{') ~ Selection.+ ~ Comments ~ wsNoComment('}') ~> - ((x: Seq[ast.Selection], comments: Vector[ast.Comment]) ⇒ x.toVector → comments) + ((x: Seq[ast.Selection], comments: Vector[ast.Comment]) => x.toVector -> comments) } def Selection = rule { Field | FragmentSpread | InlineFragment } @@ -414,8 +414,8 @@ trait Operations extends PositionTracking { this: Parser with Tokens with Ignore Comments ~ trackPos ~ Alias.? ~ Name ~ (Arguments.? ~> (_ getOrElse Vector.empty)) ~ (Directives.? ~> (_ getOrElse Vector.empty)) ~ - (SelectionSet.? ~> (_ getOrElse (Vector.empty → Vector.empty))) ~> - ((comment, location, alias, name, args, dirs, sels) ⇒ ast.Field(alias, name, args, dirs, sels._1, comment, sels._2, location)) + (SelectionSet.? 
~> (_ getOrElse (Vector.empty -> Vector.empty))) ~> + ((comment, location, alias, name, args, dirs, sels) => ast.Field(alias, name, args, dirs, sels._1, comment, sels._2, location)) } def Alias = rule { Name ~ ws(':') } @@ -424,28 +424,28 @@ trait Operations extends PositionTracking { this: Parser with Tokens with Ignore def ArgumentsConst = rule { Ignored.* ~ wsNoComment('(') ~ ArgumentConst.+ ~ wsNoComment(')') ~> (_.toVector) } - def Argument = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ Value ~> ((comment, location, name, value) ⇒ ast.Argument(name, value, comment, location)) } + def Argument = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ Value ~> ((comment, location, name, value) => ast.Argument(name, value, comment, location)) } - def ArgumentConst = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ ValueConst ~> ((comment, location, name, value) ⇒ ast.Argument(name, value, comment, location)) } + def ArgumentConst = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ ValueConst ~> ((comment, location, name, value) => ast.Argument(name, value, comment, location)) } } -trait Fragments { this: Parser with Tokens with Ignored with Directives with Types with Operations ⇒ +trait Fragments { this: Parser with Tokens with Ignored with Directives with Types with Operations => def experimentalFragmentVariables: Boolean def FragmentSpread = rule { Comments ~ trackPos ~ Ellipsis ~ FragmentName ~ (Directives.? ~> (_ getOrElse Vector.empty)) ~> - ((comment, location, name, dirs) ⇒ ast.FragmentSpread(name, dirs, comment, location)) } + ((comment, location, name, dirs) => ast.FragmentSpread(name, dirs, comment, location)) } def InlineFragment = rule { Comments ~ trackPos ~ Ellipsis ~ TypeCondition.? ~ (Directives.? 
~> (_ getOrElse Vector.empty)) ~ SelectionSet ~> - ((comment, location, typeCondition, dirs, sels) ⇒ ast.InlineFragment(typeCondition, dirs, sels._1, comment, sels._2, location)) } + ((comment, location, typeCondition, dirs, sels) => ast.InlineFragment(typeCondition, dirs, sels._1, comment, sels._2, location)) } def on = rule { Keyword("on") } def Fragment = rule { Keyword("fragment") } def FragmentDefinition = rule { Comments ~ trackPos ~ Fragment ~ FragmentName ~ ExperimentalFragmentVariables ~ TypeCondition ~ (Directives.? ~> (_ getOrElse Vector.empty)) ~ SelectionSet ~> - ((comment, location, name, vars, typeCondition, dirs, sels) ⇒ ast.FragmentDefinition(name, typeCondition, dirs, sels._1, vars, comment, sels._2, location)) } + ((comment, location, name, vars, typeCondition, dirs, sels) => ast.FragmentDefinition(name, typeCondition, dirs, sels._1, vars, comment, sels._2, location)) } def ExperimentalFragmentVariables = rule { test(experimentalFragmentVariables) ~ VariableDefinitions.? 
~> (_ getOrElse Vector.empty) | push(Vector.empty) @@ -457,14 +457,14 @@ trait Fragments { this: Parser with Tokens with Ignored with Directives with Typ } -trait Values { this: Parser with Tokens with Ignored with Operations ⇒ +trait Values { this: Parser with Tokens with Ignored with Operations => def ValueConst: Rule1[ast.Value] = rule { NumberValue | StringValue | BooleanValue | NullValue | EnumValue | ListValueConst | ObjectValueConst } def Value: Rule1[ast.Value] = rule { - Comments ~ trackPos ~ Variable ~> ((comment, location, name) ⇒ ast.VariableValue(name, comment, location)) | + Comments ~ trackPos ~ Variable ~> ((comment, location, name) => ast.VariableValue(name, comment, location)) | NumberValue | StringValue | BooleanValue | @@ -475,8 +475,8 @@ trait Values { this: Parser with Tokens with Ignored with Operations ⇒ } def BooleanValue = rule { - Comments ~ trackPos ~ True ~> ((comment, location) ⇒ ast.BooleanValue(true, comment, location)) | - Comments ~ trackPos ~ False ~> ((comment, location) ⇒ ast.BooleanValue(false, comment, location)) + Comments ~ trackPos ~ True ~> ((comment, location) => ast.BooleanValue(true, comment, location)) | + Comments ~ trackPos ~ False ~> ((comment, location) => ast.BooleanValue(false, comment, location)) } def True = rule { Keyword("true") } @@ -485,50 +485,50 @@ trait Values { this: Parser with Tokens with Ignored with Operations ⇒ def Null = rule { Keyword("null") } - def NullValue = rule { Comments ~ trackPos ~ Null ~> ((comment, location) ⇒ ast.NullValue(comment, location)) } + def NullValue = rule { Comments ~ trackPos ~ Null ~> ((comment, location) => ast.NullValue(comment, location)) } - def EnumValue = rule { Comments ~ !(True | False) ~ trackPos ~ Name ~> ((comment, location, name) ⇒ ast.EnumValue(name, comment, location)) } + def EnumValue = rule { Comments ~ !(True | False) ~ trackPos ~ Name ~> ((comment, location, name) => ast.EnumValue(name, comment, location)) } - def ListValueConst = rule { Comments ~ 
trackPos ~ wsNoComment('[') ~ ValueConst.* ~ wsNoComment(']') ~> ((comment, location, v) ⇒ ast.ListValue(v.toVector, comment, location)) } + def ListValueConst = rule { Comments ~ trackPos ~ wsNoComment('[') ~ ValueConst.* ~ wsNoComment(']') ~> ((comment, location, v) => ast.ListValue(v.toVector, comment, location)) } - def ListValue = rule { Comments ~ trackPos ~ wsNoComment('[') ~ Value.* ~ wsNoComment(']') ~> ((comment, location, v) ⇒ ast.ListValue(v.toVector, comment, location)) } + def ListValue = rule { Comments ~ trackPos ~ wsNoComment('[') ~ Value.* ~ wsNoComment(']') ~> ((comment, location, v) => ast.ListValue(v.toVector, comment, location)) } - def ObjectValueConst = rule { Comments ~ trackPos ~ wsNoComment('{') ~ ObjectFieldConst.* ~ wsNoComment('}') ~> ((comment, location, f) ⇒ ast.ObjectValue(f.toVector, comment, location)) } + def ObjectValueConst = rule { Comments ~ trackPos ~ wsNoComment('{') ~ ObjectFieldConst.* ~ wsNoComment('}') ~> ((comment, location, f) => ast.ObjectValue(f.toVector, comment, location)) } - def ObjectValue = rule { Comments ~ trackPos ~ wsNoComment('{') ~ ObjectField.* ~ wsNoComment('}') ~> ((comment, location, f) ⇒ ast.ObjectValue(f.toVector, comment, location)) } + def ObjectValue = rule { Comments ~ trackPos ~ wsNoComment('{') ~ ObjectField.* ~ wsNoComment('}') ~> ((comment, location, f) => ast.ObjectValue(f.toVector, comment, location)) } - def ObjectFieldConst = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ ValueConst ~> ((comment, location, name, value) ⇒ ast.ObjectField(name, value, comment, location)) } + def ObjectFieldConst = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ ValueConst ~> ((comment, location, name, value) => ast.ObjectField(name, value, comment, location)) } - def ObjectField = rule { Comments ~ trackPos ~ Name ~ wsNoComment(':') ~ Value ~> ((comment, location, name, value) ⇒ ast.ObjectField(name, value, comment, location)) } + def ObjectField = rule { Comments ~ trackPos ~ Name ~ 
wsNoComment(':') ~ Value ~> ((comment, location, name, value) => ast.ObjectField(name, value, comment, location)) } } -trait Directives { this: Parser with Tokens with Operations with Ignored ⇒ +trait Directives { this: Parser with Tokens with Operations with Ignored => def Directives = rule { Directive.+ ~> (_.toVector) } def DirectivesConst = rule { DirectiveConst.+ ~> (_.toVector) } def Directive = rule { Comments ~ trackPos ~ '@' ~ NameStrict ~ (Arguments.? ~> (_ getOrElse Vector.empty)) ~> - ((comment, location, name, args) ⇒ ast.Directive(name, args, comment, location)) } + ((comment, location, name, args) => ast.Directive(name, args, comment, location)) } def DirectiveConst = rule { Comments ~ trackPos ~ '@' ~ NameStrict ~ (ArgumentsConst.? ~> (_ getOrElse Vector.empty)) ~> - ((comment, location, name, args) ⇒ ast.Directive(name, args, comment, location)) } + ((comment, location, name, args) => ast.Directive(name, args, comment, location)) } } -trait Types { this: Parser with Tokens with Ignored ⇒ +trait Types { this: Parser with Tokens with Ignored => def Type: Rule1[ast.Type] = rule { NonNullType | ListType | NamedType } def TypeName = rule { Name } - def NamedType = rule { Ignored.* ~ trackPos ~ TypeName ~> ((location, name) ⇒ ast.NamedType(name, location))} + def NamedType = rule { Ignored.* ~ trackPos ~ TypeName ~> ((location, name) => ast.NamedType(name, location))} - def ListType = rule { trackPos ~ ws('[') ~ Type ~ wsNoComment(']') ~> ((location, tpe) ⇒ ast.ListType(tpe, location)) } + def ListType = rule { trackPos ~ ws('[') ~ Type ~ wsNoComment(']') ~> ((location, tpe) => ast.ListType(tpe, location)) } def NonNullType = rule { - trackPos ~ TypeName ~ wsNoComment('!') ~> ((location, name) ⇒ ast.NotNullType(ast.NamedType(name, location), location)) | - trackPos ~ ListType ~ wsNoComment('!') ~> ((location, tpe) ⇒ ast.NotNullType(tpe, location)) + trackPos ~ TypeName ~ wsNoComment('!') ~> ((location, name) => ast.NotNullType(ast.NamedType(name, 
location), location)) | + trackPos ~ ListType ~ wsNoComment('!') ~> ((location, tpe) => ast.NotNullType(tpe, location)) } } @@ -559,9 +559,9 @@ object QueryParser { config.parseComments) parser.Document.run() match { - case Success(res) ⇒ scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) - case Failure(e: ParseError) ⇒ scheme.failure(SyntaxError(parser, input, e)) - case Failure(e) ⇒ scheme.failure(e) + case Success(res) => scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) + case Failure(e: ParseError) => scheme.failure(SyntaxError(parser, input, e)) + case Failure(e) => scheme.failure(e) } } @@ -572,10 +572,10 @@ object QueryParser { val parser = new QueryParser(input, "") parser.InputDocument.run() match { - case Success(res) if res.values.nonEmpty ⇒ scheme.success(res.values.head) - case Success(res) ⇒ scheme.failure(new IllegalArgumentException("Input document does not contain any value definitions.")) - case Failure(e: ParseError) ⇒ scheme.failure(SyntaxError(parser, input, e)) - case Failure(e) ⇒ scheme.failure(e) + case Success(res) if res.values.nonEmpty => scheme.success(res.values.head) + case Success(res) => scheme.failure(new IllegalArgumentException("Input document does not contain any value definitions.")) + case Failure(e: ParseError) => scheme.failure(SyntaxError(parser, input, e)) + case Failure(e) => scheme.failure(e) } } @@ -594,9 +594,9 @@ object QueryParser { config.parseComments) parser.InputDocument.run() match { - case Success(res) ⇒ scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) - case Failure(e: ParseError) ⇒ scheme.failure(SyntaxError(parser, input, e)) - case Failure(e) ⇒ scheme.failure(e) + case Success(res) => scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) + case Failure(e: ParseError) => scheme.failure(SyntaxError(parser, input, e)) + case Failure(e) => scheme.failure(e) } } @@ -607,10 +607,10 @@ object QueryParser { val parser = new 
QueryParser(input, "") parser.InputDocumentWithVariables.run() match { - case Success(res) if res.values.nonEmpty ⇒ scheme.success(res.values.head) - case Success(res) ⇒ scheme.failure(new IllegalArgumentException("Input document does not contain any value definitions.")) - case Failure(e: ParseError) ⇒ scheme.failure(SyntaxError(parser, input, e)) - case Failure(e) ⇒ scheme.failure(e) + case Success(res) if res.values.nonEmpty => scheme.success(res.values.head) + case Success(res) => scheme.failure(new IllegalArgumentException("Input document does not contain any value definitions.")) + case Failure(e: ParseError) => scheme.failure(SyntaxError(parser, input, e)) + case Failure(e) => scheme.failure(e) } } @@ -622,9 +622,9 @@ object QueryParser { val parser = new QueryParser(input, id) parser.InputDocumentWithVariables.run() match { - case Success(res) ⇒ scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) - case Failure(e: ParseError) ⇒ scheme.failure(SyntaxError(parser, input, e)) - case Failure(e) ⇒ scheme.failure(e) + case Success(res) => scheme.success(res.copy(sourceMapper = config.sourceMapperFn(id, input))) + case Failure(e: ParseError) => scheme.failure(SyntaxError(parser, input, e)) + case Failure(e) => scheme.failure(e) } } } @@ -633,8 +633,8 @@ case class ParserConfig( legacyImplementsInterface: Boolean = false, legacyEmptyFields: Boolean = false, experimentalFragmentVariables: Boolean = false, - sourceIdFn: ParserInput ⇒ String = ParserConfig.defaultSourceIdFn, - sourceMapperFn: (String, ParserInput) ⇒ Option[SourceMapper] = ParserConfig.defaultSourceMapperFn, + sourceIdFn: ParserInput => String = ParserConfig.defaultSourceIdFn, + sourceMapperFn: (String, ParserInput) => Option[SourceMapper] = ParserConfig.defaultSourceMapperFn, parseLocations: Boolean = true, parseComments: Boolean = true ) { @@ -648,7 +648,7 @@ case class ParserConfig( def withEmptySourceId: ParserConfig = copy(sourceIdFn = ParserConfig.emptySourceIdFn) - def 
withSourceMapper(fn: (String, ParserInput) ⇒ Option[SourceMapper]): ParserConfig = copy(sourceMapperFn = fn) + def withSourceMapper(fn: (String, ParserInput) => Option[SourceMapper]): ParserConfig = copy(sourceMapperFn = fn) def withoutSourceMapper: ParserConfig = copy(sourceMapperFn = ParserConfig.emptySourceMapperFn) @@ -660,10 +660,10 @@ case class ParserConfig( object ParserConfig { lazy val default: ParserConfig = ParserConfig() - lazy val emptySourceIdFn: ParserInput ⇒ String = _ ⇒ "" - lazy val defaultSourceIdFn: ParserInput ⇒ String = _ ⇒ UUID.randomUUID().toString + lazy val emptySourceIdFn: ParserInput => String = _ => "" + lazy val defaultSourceIdFn: ParserInput => String = _ => UUID.randomUUID().toString - lazy val emptySourceMapperFn: (String, ParserInput) ⇒ Option[SourceMapper] = (_, _) ⇒ None - lazy val defaultSourceMapperFn: (String, ParserInput) ⇒ Option[SourceMapper] = - (id, input) ⇒ Some(new DefaultSourceMapper(id, input)) -} \ No newline at end of file + lazy val emptySourceMapperFn: (String, ParserInput) => Option[SourceMapper] = (_, _) => None + lazy val defaultSourceMapperFn: (String, ParserInput) => Option[SourceMapper] = + (id, input) => Some(new DefaultSourceMapper(id, input)) +} diff --git a/src/main/scala/sangria/parser/SourceMapper.scala b/src/main/scala/sangria/parser/SourceMapper.scala index 79f11b0c..9301b7c2 100644 --- a/src/main/scala/sangria/parser/SourceMapper.scala +++ b/src/main/scala/sangria/parser/SourceMapper.scala @@ -21,23 +21,23 @@ class DefaultSourceMapper(val id: String, val parserInput: ParserInput) extends } class AggregateSourceMapper(val id: String, val delegates: Vector[SourceMapper]) extends SourceMapper { - lazy val delegateById: Map[String, SourceMapper] = delegates.iterator.map(d ⇒ d.id → d).toMap + lazy val delegateById: Map[String, SourceMapper] = delegates.iterator.map(d => d.id -> d).toMap lazy val source = delegates.map(_.source.trim) mkString "\n\n" def renderLocation(location: AstLocation) = - 
delegateById.get(location.sourceId).fold("")(sm ⇒ sm.renderLocation(location)) + delegateById.get(location.sourceId).fold("")(sm => sm.renderLocation(location)) def renderLinePosition(location: AstLocation, prefix: String = "") = - delegateById.get(location.sourceId).fold("")(sm ⇒ sm.renderLinePosition(location, prefix)) + delegateById.get(location.sourceId).fold("")(sm => sm.renderLinePosition(location, prefix)) } object AggregateSourceMapper { def merge(mappers: Vector[SourceMapper]) = { def expand(sm: SourceMapper): Vector[SourceMapper] = sm match { - case agg: AggregateSourceMapper ⇒ agg.delegates.flatMap(expand) - case m ⇒ Vector(m) + case agg: AggregateSourceMapper => agg.delegates.flatMap(expand) + case m => Vector(m) } new AggregateSourceMapper("merged", mappers.flatMap(expand)) diff --git a/src/main/scala/sangria/renderer/QueryRenderer.scala b/src/main/scala/sangria/renderer/QueryRenderer.scala index 8c18aa7b..684830c9 100644 --- a/src/main/scala/sangria/renderer/QueryRenderer.scala +++ b/src/main/scala/sangria/renderer/QueryRenderer.scala @@ -42,18 +42,18 @@ object QueryRenderer { def renderSelections(sels: Vector[Selection], tc: WithTrailingComments, indent: Indent, config: QueryRendererConfig) = if (sels.nonEmpty) { val rendered = sels.zipWithIndex map { - case (sel, idx) ⇒ + case (sel, idx) => val prev = if (idx == 0) None else Some(sels(idx - 1)) val next = if (idx == sels.size - 1) None else Some(sels(idx + 1)) val trailingNext = - for (n ← next; c ← n.comments.headOption; cp ← c.location; sp ← sel.location; if cp.line == sp.line) yield c + for (n <- next; c <- n.comments.headOption; cp <- c.location; sp <- sel.location; if cp.line == sp.line) yield c val trailing = - trailingNext orElse (for (c ← tc.trailingComments.headOption; cp ← c.location; sp ← sel.location; if cp.line == sp.line) yield c) + trailingNext orElse (for (c <- tc.trailingComments.headOption; cp <- c.location; sp <- sel.location; if cp.line == sp.line) yield c) (if (idx != 0 && 
shouldRenderComment(sel, prev, config)) config.lineBreak else "") + renderNode(sel, config, indent.inc, prev = prev) + - trailing.fold("")(c ⇒ renderIndividualComment(c, " ", config)) + trailing.fold("")(c => renderIndividualComment(c, " ", config)) } mkString config.mandatoryLineBreak "{" + @@ -68,19 +68,19 @@ object QueryRenderer { def renderFieldDefinitions(fields: Vector[FieldDefinition], tc: WithTrailingComments, indent: Indent, config: QueryRendererConfig, frontSep: Boolean = false) = if (fields.nonEmpty) { val rendered = fields.zipWithIndex map { - case (field, idx) ⇒ + case (field, idx) => val prev = if (idx == 0) None else Some(fields(idx - 1)) val next = if (idx == fields.size - 1) None else Some(fields(idx + 1)) val trailingNext = - for (n ← next; c ← n.description.fold(n.comments)(_.comments).headOption; cp ← c.location; sp ← field.location; if cp.line == sp.line) yield c + for (n <- next; c <- n.description.fold(n.comments)(_.comments).headOption; cp <- c.location; sp <- field.location; if cp.line == sp.line) yield c val trailing = - trailingNext orElse (for (c ← tc.trailingComments.headOption; cp ← c.location; sp ← field.location; if cp.line == sp.line) yield c) + trailingNext orElse (for (c <- tc.trailingComments.headOption; cp <- c.location; sp <- field.location; if cp.line == sp.line) yield c) (if (idx != 0 && (shouldRenderComment(field, prev, config) || shouldRenderDescription(field))) config.lineBreak else "") + renderNode(field, config, indent.inc, prev = prev) + - trailing.fold("")(c ⇒ renderIndividualComment(c, " ", config)) + trailing.fold("")(c => renderIndividualComment(c, " ", config)) } mkString config.mandatoryLineBreak (if (frontSep) config.separator else "") + @@ -108,25 +108,25 @@ object QueryRenderer { def renderEnumValues(values: Vector[EnumValueDefinition], tc: WithTrailingComments, indent: Indent, config: QueryRendererConfig, frontSep: Boolean = false) = if (values.nonEmpty) { val renderedValues = values.zipWithIndex map { - case 
(value, idx) ⇒ + case (value, idx) => val prev = if (idx == 0) None else Some(values(idx - 1)) val next = if (idx == values.size - 1) None else Some(values(idx + 1)) val trailingNext = for { - n ← next - c ← n.description.fold(n.comments)(_.comments).headOption - cp ← c.location - sp ← value.location + n <- next + c <- n.description.fold(n.comments)(_.comments).headOption + cp <- c.location + sp <- value.location if cp.line == sp.line } yield c val trailing = - trailingNext orElse (for (c ← tc.trailingComments.headOption; cp ← c.location; sp ← value.location; if cp.line == sp.line) yield c) + trailingNext orElse (for (c <- tc.trailingComments.headOption; cp <- c.location; sp <- value.location; if cp.line == sp.line) yield c) (if (idx != 0 && (shouldRenderComment(value, prev, config) || shouldRenderDescription(value))) config.lineBreak else "") + renderNode(value, config, indent.inc, prev = prev) + - trailing.fold("")(c ⇒ renderIndividualComment(c, " ", config)) + trailing.fold("")(c => renderIndividualComment(c, " ", config)) } mkString config.mandatoryLineBreak @@ -142,7 +142,7 @@ object QueryRenderer { def renderOperationTypeDefinitions(ops: Vector[OperationTypeDefinition], tc: WithTrailingComments, indent: Indent, config: QueryRendererConfig, frontSep: Boolean = false) = if (ops.nonEmpty) { - val renderedOps = ops.zipWithIndex map { case (op, idx) ⇒ + val renderedOps = ops.zipWithIndex map { case (op, idx) => (if (idx != 0 && shouldRenderComment(op, None, config)) config.lineBreak else "") + renderNode(op, config, indent.inc) } mkString config.mandatoryLineBreak @@ -163,7 +163,7 @@ object QueryRenderer { def renderArgs(args: Vector[Argument], indent: Indent, config: QueryRendererConfig, withSep: Boolean = true) = if (args.nonEmpty) { - val argsRendered = args.zipWithIndex map { case (a, idx) ⇒ + val argsRendered = args.zipWithIndex map { case (a, idx) => (if (idx != 0 && shouldRenderComment(a, None, config)) config.lineBreak else "") + (if 
(shouldRenderComment(a, None, config)) config.mandatoryLineBreak else if (idx != 0) config.separator else "") + renderNode(a, config, if (shouldRenderComment(a, None, config)) indent.inc else indent.zero) @@ -174,7 +174,7 @@ object QueryRenderer { def renderInputValueDefs(args: Vector[InputValueDefinition], indent: Indent, config: QueryRendererConfig, withSep: Boolean = true) = if (args.nonEmpty) { - val argsRendered = args.zipWithIndex map { case (a, idx) ⇒ + val argsRendered = args.zipWithIndex map { case (a, idx) => (if (idx != 0 && (shouldRenderComment(a, None, config) || shouldRenderDescription(a))) config.lineBreak else "") + (if (shouldRenderComment(a, None, config) || shouldRenderDescription(a)) config.mandatoryLineBreak else if (idx != 0) config.separator else "") + renderNode(a, config, if (shouldRenderComment(a, None, config) || shouldRenderDescription(a)) indent.inc else indent.zero) @@ -185,7 +185,7 @@ object QueryRenderer { def renderVarDefs(vars: Vector[VariableDefinition], indent: Indent, config: QueryRendererConfig, withSep: Boolean = true) = if (vars.nonEmpty) { - val varsRendered = vars.zipWithIndex map { case (v, idx) ⇒ + val varsRendered = vars.zipWithIndex map { case (v, idx) => (if (idx != 0 && shouldRenderComment(v, None, config)) config.lineBreak else "") + (if (shouldRenderComment(v, None, config)) config.mandatoryLineBreak else if (idx != 0) config.separator else "") + renderNode(v, config, if (shouldRenderComment(v, None, config)) indent + 2 else indent.zero) @@ -195,19 +195,19 @@ object QueryRenderer { } else "" def renderInputObjectFieldDefs(fields: Vector[InputValueDefinition], tc: WithTrailingComments, indent: Indent, config: QueryRendererConfig) = { - val fieldsRendered = fields.zipWithIndex map { case (f, idx) ⇒ + val fieldsRendered = fields.zipWithIndex map { case (f, idx) => val prev = if (idx == 0) None else Some(fields(idx - 1)) val next = if (idx == fields.size - 1) None else Some(fields(idx + 1)) val trailingNext = - for (n ← 
next; c ← n.description.fold(n.comments)(_.comments).headOption; cp ← c.location; sp ← f.location; if cp.line == sp.line) yield c + for (n <- next; c <- n.description.fold(n.comments)(_.comments).headOption; cp <- c.location; sp <- f.location; if cp.line == sp.line) yield c val trailing = - trailingNext orElse (for (c ← tc.trailingComments.headOption; cp ← c.location; sp ← f.location; if cp.line == sp.line) yield c) + trailingNext orElse (for (c <- tc.trailingComments.headOption; cp <- c.location; sp <- f.location; if cp.line == sp.line) yield c) (if (idx != 0 && (shouldRenderComment(f, prev, config) || shouldRenderDescription(f))) config.lineBreak else "") + renderNode(f, config, indent.inc, prev = prev) + - trailing.fold("")(c ⇒ renderIndividualComment(c, " ", config)) + trailing.fold("")(c => renderIndividualComment(c, " ", config)) } fieldsRendered mkString config.mandatoryLineBreak @@ -222,17 +222,17 @@ object QueryRenderer { else "" def renderOpType(operationType: OperationType) = operationType match { - case OperationType.Query ⇒ "query" - case OperationType.Mutation ⇒ "mutation" - case OperationType.Subscription ⇒ "subscription" + case OperationType.Query => "query" + case OperationType.Mutation => "mutation" + case OperationType.Subscription => "subscription" } def actualComments(node: WithComments, prev: Option[AstNode]) = { - val ignoreFirst = for (ls ← prev; p ← ls.location; c ← node.comments.headOption; cp ← c.location) yield cp.line == p.line + val ignoreFirst = for (ls <- prev; p <- ls.location; c <- node.comments.headOption; cp <- c.location) yield cp.line == p.line ignoreFirst match { - case Some(true) ⇒ node.comments.tail - case _ ⇒ node.comments + case Some(true) => node.comments.tail + case _ => node.comments } } @@ -258,11 +258,11 @@ object QueryRenderer { def renderDescription(node: WithDescription, prev: Option[AstNode], indent: Indent, config: QueryRendererConfig): String = { node.description match { - case Some(description) ⇒ + case 
Some(description) => renderComment(description, prev, indent, config) + indent.str + renderStringValue(description, indent, config, extraIndent = false) + config.mandatoryLineBreak - case None ⇒ "" + case None => "" } } @@ -280,20 +280,20 @@ object QueryRenderer { val nodeLine = nodePos.map(_.line).orElse(comments.last.location.map(_.line + 1)).fold(1)(identity) comments.foldRight((nodeLine, Vector.empty[String])) { - case (c, (lastLine, acc)) ⇒ + case (c, (lastLine, acc)) => val currLine = c.location.fold(lastLine - 1)(_.line) val diffLines = lastLine - currLine val fill = if (diffLines > 1) config.lineBreak else "" - currLine → ((renderIndividualComment(c, indent.str, config) + fill) +: acc) + currLine -> ((renderIndividualComment(c, indent.str, config) + fill) +: acc) }._2 } def renderTrailingComment(node: WithTrailingComments, lastSelection: Option[AstNode], indent: Indent, config: QueryRendererConfig): String = { - val ignoreFirst = for (ls ← lastSelection; p ← ls.location; c ← node.trailingComments.headOption; cp ← c.location) yield cp.line == p.line + val ignoreFirst = for (ls <- lastSelection; p <- ls.location; c <- node.trailingComments.headOption; cp <- c.location) yield cp.line == p.line val comments = ignoreFirst match { - case Some(true) ⇒ node.trailingComments.tail - case _ ⇒ node.trailingComments + case Some(true) => node.trailingComments.tail + case _ => node.trailingComments } if (shouldRenderComment(comments, config)) { @@ -326,18 +326,18 @@ object QueryRenderer { def renderNode(node: AstNode, config: QueryRendererConfig, indent: Indent, prefix: Option[String] = None, prev: Option[AstNode] = None): String = node match { - case d @ Document(defs, _, _, _) ⇒ + case d @ Document(defs, _, _, _) => (defs map (renderNode(_, config, indent)) mkString config.definitionSeparator) + renderTrailingComment(d, None, indent, config) - case d @ InputDocument(defs, _, _, _) ⇒ + case d @ InputDocument(defs, _, _, _) => (defs map (renderNode(_, config, indent)) 
mkString config.definitionSeparator) + renderTrailingComment(d, None, indent, config) - case op @ OperationDefinition(OperationType.Query, None, vars, dirs, sels, _, _, _) if vars.isEmpty && dirs.isEmpty ⇒ + case op @ OperationDefinition(OperationType.Query, None, vars, dirs, sels, _, _, _) if vars.isEmpty && dirs.isEmpty => renderComment(op, prev, indent, config) + indent.str + renderSelections(sels, op, indent, config) - case op @ OperationDefinition(opType, name, vars, dirs, sels, _, _, _) ⇒ + case op @ OperationDefinition(opType, name, vars, dirs, sels, _, _, _) => renderComment(op, prev, indent, config) + indent.str + renderOpType(opType) + config.mandatorySeparator + (name getOrElse "") + @@ -346,30 +346,30 @@ object QueryRenderer { renderDirs(dirs, config, indent) + renderSelections(sels, op, indent, config) - case fd @ FragmentDefinition(name, typeCondition, dirs, sels, vars, _, _, _) ⇒ + case fd @ FragmentDefinition(name, typeCondition, dirs, sels, vars, _, _, _) => renderComment(fd, prev, indent, config) + indent.str + "fragment" + config.mandatorySeparator + name + renderVarDefs(vars, indent, config, withSep = false) + config.mandatorySeparator + "on" + config.mandatorySeparator + typeCondition.name + config.separator + renderDirs(dirs, config, indent) + renderSelections(sels, fd, indent, config) - case vd @ VariableDefinition(name, tpe, defaultValue, dirs, _, _) ⇒ + case vd @ VariableDefinition(name, tpe, defaultValue, dirs, _, _) => renderComment(vd, prev, indent, config) + indent.str + "$" + name + ":" + config.separator + renderNode(tpe, config, indent.zero) + - (defaultValue map (v ⇒ config.separator + "=" + config.separator + renderNode(v, config, indent.zero)) getOrElse "") + + (defaultValue map (v => config.separator + "=" + config.separator + renderNode(v, config, indent.zero)) getOrElse "") + renderDirs(dirs, config, indent, frontSep = true) - case NotNullType(ofType, _) ⇒ + case NotNullType(ofType, _) => renderNode(ofType, config, indent.zero) 
+ "!" - case ListType(ofType, _) ⇒ + case ListType(ofType, _) => "[" + renderNode(ofType, config, indent.zero) + "]" - case NamedType(name, _) ⇒ + case NamedType(name, _) => name - case f @ Field(alias, name, args, dirs, sels, _, _, _) ⇒ + case f @ Field(alias, name, args, dirs, sels, _, _, _) => renderComment(f, prev, indent, config) + indent.str + (alias map (_ + ":" + config.separator) getOrElse "") + name + renderArgs(args, indent, config, withSep = false) + @@ -377,36 +377,36 @@ object QueryRenderer { renderDirs(dirs, config, indent, withSep = sels.nonEmpty) + renderSelections(sels, f, indent, config) - case fs @ FragmentSpread(name, dirs, _, _) ⇒ + case fs @ FragmentSpread(name, dirs, _, _) => renderComment(fs, prev, indent, config) + indent.str + "..." + name + renderDirs(dirs, config, indent, frontSep = true) - case ifr @ InlineFragment(typeCondition, dirs, sels, _, _, _) ⇒ + case ifr @ InlineFragment(typeCondition, dirs, sels, _, _, _) => renderComment(ifr, prev, indent, config) + indent.str + "..." 
+ config.mandatorySeparator + typeCondition.fold("")("on" + config.mandatorySeparator + _.name) + config.separator + renderDirs(dirs, config, indent) + renderSelections(sels, ifr, indent, config) - case Directive(name, args, _, _) ⇒ + case Directive(name, args, _, _) => indent.str + "@" + name + renderArgs(args, indent, config.copy(renderComments = false), withSep = false) - case a @ Argument(name, value, _, _) ⇒ + case a @ Argument(name, value, _, _) => renderComment(a, prev, indent, config) + indent.str + name + ":" + config.separator + renderNode(value, config, indent.zero) - case v @ IntValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ BigIntValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ FloatValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ BigDecimalValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ BooleanValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ NullValue(_, _) ⇒ renderInputComment(v, indent, config) + "null" - case v @ EnumValue(value, _, _) ⇒ renderInputComment(v, indent, config) + value - case v @ StringValue(_, _, _, _, _) ⇒ renderInputComment(v, indent, config) + renderStringValue(v, indent, config) + case v @ IntValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ BigIntValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ FloatValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ BigDecimalValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ BooleanValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ NullValue(_, _) => renderInputComment(v, indent, config) + "null" + case v @ EnumValue(value, _, _) => renderInputComment(v, indent, config) + value + case v @ StringValue(_, _, _, _, _) => renderInputComment(v, indent, config) + renderStringValue(v, indent, config) - case v @ 
ListValue(value, _, _) ⇒ + case v @ ListValue(value, _, _) => def addIdent(v: Value) = v match { - case o: ObjectValue ⇒ false - case _ ⇒ true + case o: ObjectValue => false + case _ => true } def renderValue(v: Value, idx: Int) = @@ -418,14 +418,14 @@ object QueryRenderer { (if (idx != 0) config.separator else "") + renderNode(v, config, indent) renderInputComment(v, indent, config) + - "[" + (value.zipWithIndex map {case (v, idx) ⇒ renderValue(v, idx)} mkString config.inputListSeparator) + "]" - case v @ ObjectValue(value, _, _) ⇒ + "[" + (value.zipWithIndex map {case (v, idx) => renderValue(v, idx)} mkString config.inputListSeparator) + "]" + case v @ ObjectValue(value, _, _) => renderInputComment(v, indent, config) + "{" + inputLineBreak(config) + - (value.zipWithIndex map {case (v, idx) ⇒ (if (idx != 0 && config.formatInputValues && shouldRenderComment(v, None, config)) config.lineBreak else "") + renderNode(v, config, inputFieldIndent(config, indent))} mkString config.inputFieldSeparator) + + (value.zipWithIndex map {case (v, idx) => (if (idx != 0 && config.formatInputValues && shouldRenderComment(v, None, config)) config.lineBreak else "") + renderNode(v, config, inputFieldIndent(config, indent))} mkString config.inputFieldSeparator) + inputLineBreak(config) + inputIndent(config, indent) + "}" - case VariableValue(name, _, _) ⇒ indent.str + "$" + name - case v @ ObjectField(name, value, _, _) ⇒ + case VariableValue(name, _, _) => indent.str + "$" + name + case v @ ObjectField(name, value, _, _) => val rendered = if (config.formatInputValues && shouldRenderComment(value, None, config)) config.lineBreak + renderNode(value, config, indent.inc) @@ -435,15 +435,15 @@ object QueryRenderer { (if (config.formatInputValues) renderComment(v, prev, indent, config) else "") + indent.str + name + ":" + rendered - case c @ Comment(_, _) ⇒ renderIndividualComment(c, indent.str, config) + case c @ Comment(_, _) => renderIndividualComment(c, indent.str, config) - case std @ 
ScalarTypeDefinition(name, dirs, description, _, _) ⇒ + case std @ ScalarTypeDefinition(name, dirs, description, _, _) => renderDescription(std, prev, indent, config) + renderComment(std, description orElse prev, indent, config) + indent.str + "scalar" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) - case otd @ ObjectTypeDefinition(name, interfaces, fields, dirs, description, _, _, _) ⇒ + case otd @ ObjectTypeDefinition(name, interfaces, fields, dirs, description, _, _, _) => renderDescription(otd, prev, indent, config) + renderComment(otd, description orElse prev, indent, config) + indent.str + prefix.getOrElse("") + "type" + config.mandatorySeparator + name + @@ -452,21 +452,21 @@ object QueryRenderer { renderDirs(dirs, config, indent, withSep = fields.nonEmpty) + renderFieldDefinitions(fields, otd, indent, config) - case itd @ InputObjectTypeDefinition(name, fields, dirs, description, _, _, _) ⇒ + case itd @ InputObjectTypeDefinition(name, fields, dirs, description, _, _, _) => renderDescription(itd, prev, indent, config) + renderComment(itd, description orElse prev, indent, config) + indent.str + "input" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) + renderInputFieldDefinitions(fields, itd, indent, config, frontSep = true) - case itd @ InterfaceTypeDefinition(name, fields, dirs, description, _, _, _) ⇒ + case itd @ InterfaceTypeDefinition(name, fields, dirs, description, _, _, _) => renderDescription(itd, prev, indent, config) + renderComment(itd, description orElse prev, indent, config) + indent.str + "interface" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) + renderFieldDefinitions(fields, itd, indent, config, frontSep = true) - case utd @ UnionTypeDefinition(name, types, dirs, description, _, _) ⇒ + case utd @ UnionTypeDefinition(name, types, dirs, description, _, _) => val typesString = if (types.nonEmpty) config.separator + "=" + 
config.separator + @@ -479,20 +479,20 @@ object QueryRenderer { renderDirs(dirs, config, indent, frontSep = true) + typesString - case etd @ EnumTypeDefinition(name, values, dirs, description, _, _, _) ⇒ + case etd @ EnumTypeDefinition(name, values, dirs, description, _, _, _) => renderDescription(etd, prev, indent, config) + renderComment(etd, description orElse prev, indent, config) + indent.str + "enum" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) + renderEnumValues(values, etd, indent, config, frontSep = true) - case evd @ EnumValueDefinition(name, dirs, description, _, _) ⇒ + case evd @ EnumValueDefinition(name, dirs, description, _, _) => renderDescription(evd, prev, indent, config) + renderComment(evd, description orElse prev, indent, config) + indent.str + name + renderDirs(dirs, config, indent, frontSep = true) - case fd @ FieldDefinition(name, fieldType, args, dirs, description, _, _) ⇒ + case fd @ FieldDefinition(name, fieldType, args, dirs, description, _, _) => renderDescription(fd, prev, indent, config) + renderComment(fd, description orElse prev, indent, config) + indent.str + name + @@ -500,14 +500,14 @@ object QueryRenderer { ":" + config.separator + renderNode(fieldType, config, indent.zero) + renderDirs(dirs, config, indent, frontSep = true) - case ivd @ InputValueDefinition(name, valueType, default, dirs, description, _, _) ⇒ + case ivd @ InputValueDefinition(name, valueType, default, dirs, description, _, _) => renderDescription(ivd, prev, indent, config) + renderComment(ivd, description orElse prev, indent, config) + indent.str + name + ":" + config.separator + renderNode(valueType, config, indent.zero) + - default.fold("")(d ⇒ config.separator + "=" + config.separator + renderNode(d, config, indent.zero)) + + default.fold("")(d => config.separator + "=" + config.separator + renderNode(d, config, indent.zero)) + renderDirs(dirs, config, indent, frontSep = true) - case ted @ 
ObjectTypeExtensionDefinition(name, interfaces, fields, dirs, _, _, _) ⇒ + case ted @ ObjectTypeExtensionDefinition(name, interfaces, fields, dirs, _, _, _) => renderComment(ted, prev, indent, config) + indent.str + prefix.getOrElse("") + "extend" + config.mandatorySeparator + "type" + config.mandatorySeparator + name + config.mandatorySeparator + @@ -515,13 +515,13 @@ object QueryRenderer { renderDirs(dirs, config, indent, withSep = fields.nonEmpty) + renderFieldDefinitions(fields, ted, indent, config) - case ext @ InterfaceTypeExtensionDefinition(name, fields, dirs, _, _, _) ⇒ + case ext @ InterfaceTypeExtensionDefinition(name, fields, dirs, _, _, _) => renderComment(ext, prev, indent, config) + indent.str + "extend" + config.mandatorySeparator + "interface" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) + renderFieldDefinitions(fields, ext, indent, config, frontSep = true) - case ext @ UnionTypeExtensionDefinition(name, types, dirs, _, _) ⇒ + case ext @ UnionTypeExtensionDefinition(name, types, dirs, _, _) => val typesString = if (types.nonEmpty) config.separator + "=" + config.separator + @@ -534,31 +534,31 @@ object QueryRenderer { renderDirs(dirs, config, indent, frontSep = true) + typesString - case ext @ InputObjectTypeExtensionDefinition(name, fields, dirs, _, _, _) ⇒ + case ext @ InputObjectTypeExtensionDefinition(name, fields, dirs, _, _, _) => renderComment(ext, prev, indent, config) + indent.str + "extend" + config.mandatorySeparator + "input" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) + renderInputFieldDefinitions(fields, ext, indent, config, frontSep = true) - case ext @ EnumTypeExtensionDefinition(name, values, dirs, _, _, _) ⇒ + case ext @ EnumTypeExtensionDefinition(name, values, dirs, _, _, _) => renderComment(ext, prev, indent, config) + indent.str + "extend" + config.mandatorySeparator + "enum" + config.mandatorySeparator + name + renderDirs(dirs, config, 
indent, frontSep = true) + renderEnumValues(values, ext, indent, config, frontSep = true) - case ext @ ScalarTypeExtensionDefinition(name, dirs, _, _) ⇒ + case ext @ ScalarTypeExtensionDefinition(name, dirs, _, _) => renderComment(ext, prev, indent, config) + indent.str + "extend" + config.mandatorySeparator + "scalar" + config.mandatorySeparator + name + renderDirs(dirs, config, indent, frontSep = true) - case ext @ SchemaExtensionDefinition(ops, dirs, _, _, _) ⇒ + case ext @ SchemaExtensionDefinition(ops, dirs, _, _, _) => renderComment(ext, prev, indent, config) + indent.str + "extend" + config.mandatorySeparator + "schema" + renderDirs(dirs, config, indent, frontSep = true) + renderOperationTypeDefinitions(ops, ext, indent, config, frontSep = true) - case dd @ DirectiveDefinition(name, args, locations, description, _, _) ⇒ - val locsRendered = locations.zipWithIndex map { case (l, idx) ⇒ + case dd @ DirectiveDefinition(name, args, locations, description, _, _) => + val locsRendered = locations.zipWithIndex map { case (l, idx) => (if (idx != 0 && shouldRenderComment(l, None, config)) config.lineBreak else "") + (if (shouldRenderComment(l, None, config)) config.lineBreak else if (idx != 0) config.separator else "") + renderNode(l, config, if (shouldRenderComment(l, None, config)) indent.inc else indent.zero) @@ -571,17 +571,17 @@ object QueryRenderer { "on" + (if (shouldRenderComment(locations.head, None, config)) "" else config.mandatorySeparator) + locsRendered.mkString(config.separator + "|") - case dl @ DirectiveLocation(name, _, _) ⇒ + case dl @ DirectiveLocation(name, _, _) => renderComment(dl, prev, indent, config) + indent.str + name - case sd @ SchemaDefinition(ops, dirs, description, _, _, _) ⇒ + case sd @ SchemaDefinition(ops, dirs, description, _, _, _) => renderDescription(sd, prev, indent, config) + renderComment(sd, description orElse prev, indent, config) + indent.str + "schema" + config.separator + renderDirs(dirs, config, indent) + 
renderOperationTypeDefinitions(ops, sd, indent, config) - case otd @ OperationTypeDefinition(op, tpe, _, _) ⇒ + case otd @ OperationTypeDefinition(op, tpe, _, _) => renderComment(otd, prev, indent, config) + indent.str + renderOpType(op) + ":" + config.separator + renderNode(tpe, config, indent.zero) } @@ -637,4 +637,4 @@ case class QueryRendererConfig( formatInputValues: Boolean, formatBlockStrings: Boolean, renderComments: Boolean, - legacyImplementsInterface: Boolean) \ No newline at end of file + legacyImplementsInterface: Boolean) diff --git a/src/main/scala/sangria/renderer/SchemaRenderer.scala b/src/main/scala/sangria/renderer/SchemaRenderer.scala index bcaeca39..2071de92 100644 --- a/src/main/scala/sangria/renderer/SchemaRenderer.scala +++ b/src/main/scala/sangria/renderer/SchemaRenderer.scala @@ -13,11 +13,11 @@ import sangria.visitor.VisitorCommand object SchemaRenderer { def renderTypeName(tpe: Type, topLevel: Boolean = false) = { def loop(t: Type, suffix: String): String = t match { - case OptionType(ofType) ⇒ loop(ofType, "") - case OptionInputType(ofType) ⇒ loop(ofType, "") - case ListType(ofType) ⇒ s"[${loop(ofType, "!")}]" + suffix - case ListInputType(ofType) ⇒ s"[${loop(ofType, "!")}]" + suffix - case named: Named ⇒ named.name + suffix + case OptionType(ofType) => loop(ofType, "") + case OptionInputType(ofType) => loop(ofType, "") + case ListType(ofType) => s"[${loop(ofType, "!")}]" + suffix + case ListInputType(ofType) => s"[${loop(ofType, "!")}]" + suffix + case named: Named => named.name + suffix } loop(tpe, if (topLevel) "" else "!") @@ -29,37 +29,37 @@ object SchemaRenderer { else tpe def loop(t: Type, notNull: Boolean): ast.Type = t match { - case OptionType(ofType) ⇒ loop(ofType, false) - case OptionInputType(ofType) ⇒ loop(ofType, false) - case ListType(ofType) ⇒ nn(ast.ListType(loop(ofType, true)), notNull) - case ListInputType(ofType) ⇒ nn(ast.ListType(loop(ofType, true)), notNull) - case named: Named ⇒ nn(ast.NamedType(named.name), 
notNull) + case OptionType(ofType) => loop(ofType, false) + case OptionInputType(ofType) => loop(ofType, false) + case ListType(ofType) => nn(ast.ListType(loop(ofType, true)), notNull) + case ListInputType(ofType) => nn(ast.ListType(loop(ofType, true)), notNull) + case named: Named => nn(ast.NamedType(named.name), notNull) } loop(tpe, !topLevel) } def renderDescription(description: Option[String]): Option[ast.StringValue] = - description.flatMap { d ⇒ + description.flatMap { d => if (d.trim.nonEmpty) Some(ast.StringValue(d, block = d.indexOf('\n') > 0)) else None } def renderImplementedInterfaces(tpe: IntrospectionObjectType) = - tpe.interfaces.map(t ⇒ ast.NamedType(t.name)).toVector + tpe.interfaces.map(t => ast.NamedType(t.name)).toVector def renderImplementedInterfaces(tpe: ObjectLikeType[_, _]) = - tpe.allInterfaces.map(t ⇒ ast.NamedType(t.name)) + tpe.allInterfaces.map(t => ast.NamedType(t.name)) def renderTypeName(tpe: IntrospectionTypeRef): ast.Type = tpe match { - case IntrospectionListTypeRef(ofType) ⇒ ast.ListType(renderTypeName(ofType)) - case IntrospectionNonNullTypeRef(ofType) ⇒ ast.NotNullType(renderTypeName(ofType)) - case IntrospectionNamedTypeRef(_, name) ⇒ ast.NamedType(name) + case IntrospectionListTypeRef(ofType) => ast.ListType(renderTypeName(ofType)) + case IntrospectionNonNullTypeRef(ofType) => ast.NotNullType(renderTypeName(ofType)) + case IntrospectionNamedTypeRef(_, name) => ast.NamedType(name) } def renderDefault(defaultValue: Option[String]) = - defaultValue.flatMap(d ⇒ QueryParser.parseInput(d).toOption) + defaultValue.flatMap(d => QueryParser.parseInput(d).toOption) def renderDefault(value: (Any, ToInput[_, _]), tpe: InputType[_]) = { val coercionHelper = new ValueCoercionHelper[Any] @@ -81,10 +81,10 @@ object SchemaRenderer { def withoutDeprecated(dirs: Vector[ast.Directive]) = dirs.filterNot(_.name == "deprecated") def renderDeprecation(isDeprecated: Boolean, reason: Option[String]) = (isDeprecated, reason) match { - case (true, 
Some(r)) if r.trim == DefaultDeprecationReason ⇒ Vector(ast.Directive("deprecated", Vector.empty)) - case (true, Some(r)) if r.trim.nonEmpty ⇒ Vector(ast.Directive("deprecated", Vector(ast.Argument("reason", ast.StringValue(r.trim))))) - case (true, _) ⇒ Vector(ast.Directive("deprecated", Vector.empty)) - case _ ⇒ Vector.empty + case (true, Some(r)) if r.trim == DefaultDeprecationReason => Vector(ast.Directive("deprecated", Vector.empty)) + case (true, Some(r)) if r.trim.nonEmpty => Vector(ast.Directive("deprecated", Vector(ast.Argument("reason", ast.StringValue(r.trim))))) + case (true, _) => Vector(ast.Directive("deprecated", Vector.empty)) + case _ => Vector.empty } def renderArgsI(args: Seq[IntrospectionInputValue]) = @@ -130,7 +130,7 @@ object SchemaRenderer { ast.EnumTypeDefinition(tpe.name, renderEnumValues(tpe.values), tpe.astDirectives, renderDescription(tpe.description)) def renderEnumValuesI(values: Seq[IntrospectionEnumValue]) = - values.map(v ⇒ ast.EnumValueDefinition(v.name, renderDeprecation(v.isDeprecated, v.deprecationReason), renderDescription(v.description))).toVector + values.map(v => ast.EnumValueDefinition(v.name, renderDeprecation(v.isDeprecated, v.deprecationReason), renderDescription(v.description))).toVector def renderEnumValues(values: Seq[EnumValue[_]]) = values.map(renderEnumValue).toVector @@ -157,18 +157,18 @@ object SchemaRenderer { ast.InterfaceTypeDefinition(tpe.name, renderFields(tpe.uniqueFields), tpe.astDirectives, renderDescription(tpe.description)) def renderUnion(tpe: IntrospectionUnionType) = - ast.UnionTypeDefinition(tpe.name, tpe.possibleTypes.map(t ⇒ ast.NamedType(t.name)).toVector, description = renderDescription(tpe.description)) + ast.UnionTypeDefinition(tpe.name, tpe.possibleTypes.map(t => ast.NamedType(t.name)).toVector, description = renderDescription(tpe.description)) def renderUnion(tpe: UnionType[_]) = - ast.UnionTypeDefinition(tpe.name, tpe.types.map(t ⇒ ast.NamedType(t.name)).toVector, tpe.astDirectives, 
renderDescription(tpe.description)) + ast.UnionTypeDefinition(tpe.name, tpe.types.map(t => ast.NamedType(t.name)).toVector, tpe.astDirectives, renderDescription(tpe.description)) private def renderSchemaDefinition(schema: IntrospectionSchema): Option[ast.SchemaDefinition] = if (isSchemaOfCommonNames(schema.queryType.name, schema.mutationType.map(_.name), schema.subscriptionType.map(_.name))) None else { val withQuery = Vector(ast.OperationTypeDefinition(ast.OperationType.Query, ast.NamedType(schema.queryType.name))) - val withMutation = schema.mutationType.fold(withQuery)(t ⇒ withQuery :+ ast.OperationTypeDefinition(ast.OperationType.Mutation, ast.NamedType(t.name))) - val withSubs = schema.subscriptionType.fold(withMutation)(t ⇒ withMutation :+ ast.OperationTypeDefinition(ast.OperationType.Subscription, ast.NamedType(t.name))) + val withMutation = schema.mutationType.fold(withQuery)(t => withQuery :+ ast.OperationTypeDefinition(ast.OperationType.Mutation, ast.NamedType(t.name))) + val withSubs = schema.subscriptionType.fold(withMutation)(t => withMutation :+ ast.OperationTypeDefinition(ast.OperationType.Subscription, ast.NamedType(t.name))) Some(ast.SchemaDefinition(withSubs, description = renderDescription(schema.description))) } @@ -178,8 +178,8 @@ object SchemaRenderer { None else { val withQuery = Vector(ast.OperationTypeDefinition(ast.OperationType.Query, ast.NamedType(schema.query.name))) - val withMutation = schema.mutation.fold(withQuery)(t ⇒ withQuery :+ ast.OperationTypeDefinition(ast.OperationType.Mutation, ast.NamedType(t.name))) - val withSubs = schema.subscription.fold(withMutation)(t ⇒ withMutation :+ ast.OperationTypeDefinition(ast.OperationType.Subscription, ast.NamedType(t.name))) + val withMutation = schema.mutation.fold(withQuery)(t => withQuery :+ ast.OperationTypeDefinition(ast.OperationType.Mutation, ast.NamedType(t.name))) + val withSubs = schema.subscription.fold(withMutation)(t => withMutation :+ 
ast.OperationTypeDefinition(ast.OperationType.Subscription, ast.NamedType(t.name))) Some(ast.SchemaDefinition(withSubs, schema.astDirectives, renderDescription(schema.description))) } @@ -189,25 +189,25 @@ object SchemaRenderer { def renderType(tpe: IntrospectionType): ast.TypeDefinition = tpe match { - case o: IntrospectionObjectType ⇒ renderObject(o) - case u: IntrospectionUnionType ⇒ renderUnion(u) - case i: IntrospectionInterfaceType ⇒ renderInterface(i) - case io: IntrospectionInputObjectType ⇒ renderInputObject(io) - case s: IntrospectionScalarType ⇒ renderScalar(s) - case e: IntrospectionEnumType ⇒ renderEnum(e) - case kind ⇒ throw new IllegalArgumentException(s"Unsupported kind: $kind") + case o: IntrospectionObjectType => renderObject(o) + case u: IntrospectionUnionType => renderUnion(u) + case i: IntrospectionInterfaceType => renderInterface(i) + case io: IntrospectionInputObjectType => renderInputObject(io) + case s: IntrospectionScalarType => renderScalar(s) + case e: IntrospectionEnumType => renderEnum(e) + case kind => throw new IllegalArgumentException(s"Unsupported kind: $kind") } def renderType(tpe: Type with Named): ast.TypeDefinition = tpe match { - case o: ObjectType[_, _] ⇒ renderObject(o) - case u: UnionType[_] ⇒ renderUnion(u) - case i: InterfaceType[_, _] ⇒ renderInterface(i) - case io: InputObjectType[_] ⇒ renderInputObject(io) - case s: ScalarType[_] ⇒ renderScalar(s) - case s: ScalarAlias[_, _] ⇒ renderScalar(s.aliasFor) - case e: EnumType[_] ⇒ renderEnum(e) - case _ ⇒ throw new IllegalArgumentException(s"Unsupported type: $tpe") + case o: ObjectType[_, _] => renderObject(o) + case u: UnionType[_] => renderUnion(u) + case i: InterfaceType[_, _] => renderInterface(i) + case io: InputObjectType[_] => renderInputObject(io) + case s: ScalarType[_] => renderScalar(s) + case s: ScalarAlias[_, _] => renderScalar(s.aliasFor) + case e: EnumType[_] => renderEnum(e) + case _ => throw new IllegalArgumentException(s"Unsupported type: $tpe") } def 
renderDirectiveLocation(loc: DirectiveLocation.Value) = @@ -221,8 +221,8 @@ object SchemaRenderer { def schemaAstFromIntrospection(introspectionSchema: IntrospectionSchema, filter: SchemaFilter = SchemaFilter.default): ast.Document = { val schemaDef = if (filter.renderSchema) renderSchemaDefinition(introspectionSchema) else None - val types = introspectionSchema.types filter (t ⇒ filter.filterTypes(t.name)) sortBy (_.name) map renderType - val directives = introspectionSchema.directives filter (d ⇒ filter.filterDirectives(d.name)) sortBy (_.name) map renderDirective + val types = introspectionSchema.types filter (t => filter.filterTypes(t.name)) sortBy (_.name) map renderType + val directives = introspectionSchema.directives filter (d => filter.filterDirectives(d.name)) sortBy (_.name) map renderDirective ast.Document(schemaDef.toVector ++ types ++ directives) } @@ -247,8 +247,8 @@ object SchemaRenderer { def schemaAst(schema: Schema[_, _], filter: SchemaFilter = SchemaFilter.default): ast.Document = { val schemaDef = if (filter.renderSchema) renderSchemaDefinition(schema) else None - val types = schema.typeList filter (t ⇒ filter.filterTypes(t.name)) sortBy (_.name) map renderType - val directives = schema.directives filter (d ⇒ filter.filterDirectives(d.name)) sortBy (_.name) map renderDirective + val types = schema.typeList filter (t => filter.filterTypes(t.name)) sortBy (_.name) map renderType + val directives = schema.directives filter (d => filter.filterDirectives(d.name)) sortBy (_.name) map renderDirective val document = ast.Document(schemaDef.toVector ++ types ++ directives) @@ -258,30 +258,30 @@ object SchemaRenderer { def transformLegacyCommentDescriptions[T <: AstNode](node: T): T = AstVisitor.visit(node, AstVisitor { - case n: ast.DirectiveDefinition if n.description.isDefined ⇒ + case n: ast.DirectiveDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: 
ast.InterfaceTypeDefinition if n.description.isDefined ⇒ + case n: ast.InterfaceTypeDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.EnumTypeDefinition if n.description.isDefined ⇒ + case n: ast.EnumTypeDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.EnumValueDefinition if n.description.isDefined ⇒ + case n: ast.EnumValueDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.FieldDefinition if n.description.isDefined ⇒ + case n: ast.FieldDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.InputObjectTypeDefinition if n.description.isDefined ⇒ + case n: ast.InputObjectTypeDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.InputValueDefinition if n.description.isDefined ⇒ + case n: ast.InputValueDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.ObjectTypeDefinition if n.description.isDefined ⇒ + case n: ast.ObjectTypeDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.ScalarTypeDefinition if n.description.isDefined ⇒ + case n: ast.ScalarTypeDefinition if n.description.isDefined => VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) - case n: ast.UnionTypeDefinition if n.description.isDefined ⇒ + case n: ast.UnionTypeDefinition if n.description.isDefined => 
VisitorCommand.Transform(n.copy(description = None, comments = n.comments ++ commentDescription(n))) }) private def commentDescription(node: ast.WithDescription) = - node.description.toVector.flatMap(sv ⇒ sv.value.split("\\r?\\n").toVector.map(ast.Comment(_))) + node.description.toVector.flatMap(sv => sv.value.split("\\r?\\n").toVector.map(ast.Comment(_))) def renderSchema(schema: Schema[_, _]): String = schemaAst(schema, SchemaFilter.default).renderPretty @@ -290,34 +290,34 @@ object SchemaRenderer { schemaAst(schema, filter).renderPretty } -case class SchemaFilter(filterTypes: String ⇒ Boolean, filterDirectives: String ⇒ Boolean, renderSchema: Boolean = true, legacyCommentDescriptions: Boolean = false) { +case class SchemaFilter(filterTypes: String => Boolean, filterDirectives: String => Boolean, renderSchema: Boolean = true, legacyCommentDescriptions: Boolean = false) { @deprecated("Please migrate to new string-based description format", "1.4.0") def withLegacyCommentDescriptions = copy(legacyCommentDescriptions = true) } object SchemaFilter { val withoutSangriaBuiltIn: SchemaFilter = SchemaFilter( - typeName ⇒ !Schema.isBuiltInType(typeName), - dirName ⇒ !Schema.isBuiltInDirective(dirName)) + typeName => !Schema.isBuiltInType(typeName), + dirName => !Schema.isBuiltInDirective(dirName)) val default: SchemaFilter = withoutSangriaBuiltIn val withoutGraphQLBuiltIn = SchemaFilter( - typeName ⇒ !Schema.isBuiltInGraphQLType(typeName), - dirName ⇒ !Schema.isBuiltInDirective(dirName)) + typeName => !Schema.isBuiltInGraphQLType(typeName), + dirName => !Schema.isBuiltInDirective(dirName)) val withoutIntrospection: SchemaFilter = SchemaFilter( - typeName ⇒ !Schema.isIntrospectionType(typeName), + typeName => !Schema.isIntrospectionType(typeName), Function.const(true)) val builtIn: SchemaFilter = SchemaFilter( - typeName ⇒ Schema.isBuiltInType(typeName), - dirName ⇒ Schema.isBuiltInDirective(dirName)) + typeName => Schema.isBuiltInType(typeName), + dirName => 
Schema.isBuiltInDirective(dirName)) val introspection: SchemaFilter = SchemaFilter( - typeName ⇒ Schema.isIntrospectionType(typeName), + typeName => Schema.isIntrospectionType(typeName), Function.const(false), renderSchema = false) val all: SchemaFilter = SchemaFilter(Function.const(true), Function.const(true)) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/schema/AstSchemaBuilder.scala b/src/main/scala/sangria/schema/AstSchemaBuilder.scala index 439eff2a..3ef79a89 100644 --- a/src/main/scala/sangria/schema/AstSchemaBuilder.scala +++ b/src/main/scala/sangria/schema/AstSchemaBuilder.scala @@ -40,7 +40,7 @@ trait AstSchemaBuilder[Ctx] { origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]): Option[ObjectType[Ctx, Any]] @@ -48,7 +48,7 @@ trait AstSchemaBuilder[Ctx] { origin: MatOrigin, existing: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]): ObjectType[Ctx, Any] @@ -56,28 +56,28 @@ trait AstSchemaBuilder[Ctx] { origin: MatOrigin, extensions: Vector[ast.InputObjectTypeExtensionDefinition], definition: ast.InputObjectTypeDefinition, - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: AstSchemaMaterializer[Ctx]): Option[InputObjectType[InputObjectType.DefaultInput]] def transformInputObjectType[T]( origin: MatOrigin, extensions: Vector[ast.InputObjectTypeExtensionDefinition], existing: InputObjectType[T], - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: AstSchemaMaterializer[Ctx]): InputObjectType[T] def buildInterfaceType( origin: MatOrigin, definition: ast.InterfaceTypeDefinition, extensions: 
List[ast.InterfaceTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]): Option[InterfaceType[Ctx, Any]] def extendInterfaceType( origin: MatOrigin, existing: InterfaceType[Ctx, _], extensions: List[ast.InterfaceTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]): InterfaceType[Ctx, Any] def buildUnionType( @@ -263,8 +263,8 @@ object AstSchemaBuilder { def unapply(definition: Either[ast.TypeDefinition, Named]): Option[String] = definition match { - case Left(t) ⇒ Some(t.name) - case Right(t) ⇒ Some(t.name) + case Left(t) => Some(t.name) + case Right(t) => Some(t.name) } } @@ -279,15 +279,15 @@ object AstSchemaBuilder { def extractDescription(node: ast.WithComments): Option[String] = if (node.comments.nonEmpty) { node.location.map(_.line).orElse(node.comments.last.location.map(_.line + 1)) match { - case Some(nodeLine) ⇒ + case Some(nodeLine) => val (_, relevantComments) = node.comments.foldRight((nodeLine - 1, Vector.empty[String])) { - case (c, (expectedLine, acc)) if c.location.isDefined && c.location.get.line == expectedLine ⇒ - (expectedLine - 1) → (c.text +: acc) - case (c, acc ) ⇒ acc + case (c, (expectedLine, acc)) if c.location.isDefined && c.location.get.line == expectedLine => + (expectedLine - 1) -> (c.text +: acc) + case (c, acc ) => acc } extractDescription(relevantComments) - case None ⇒ + case None => extractDescription(node.comments map (_.text)) } } else None @@ -355,23 +355,23 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]) = { val directives = definition.directives ++ extensions.flatMap(_.directives) val 
objectType = objectTypeInstanceCheck(origin, definition, extensions) match { - case Some(fn) ⇒ + case Some(fn) => ObjectType[Ctx, Any]( name = typeName(definition), description = typeDescription(definition), fieldsFn = fields, interfaces = interfaces, - instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) ⇒ fn(value, clazz), + instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) => fn(value, clazz), astDirectives = directives, astNodes = (definition +: extensions).toVector) - case None ⇒ + case None => ObjectType[Ctx, Any]( name = typeName(definition), description = typeDescription(definition), @@ -389,31 +389,31 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { origin: MatOrigin, existing: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]) = extendedObjectTypeInstanceCheck(origin, existing, extensions) match { - case Some(fn) ⇒ + case Some(fn) => existing.copy( fieldsFn = fields, interfaces = interfaces, astDirectives = existing.astDirectives ++ extensions.flatMap(_.directives), astNodes = existing.astNodes ++ extensions, - instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) ⇒ fn(value, clazz))(ClassTag(existing.valClass)) - case None ⇒ + instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) => fn(value, clazz))(ClassTag(existing.valClass)) + case None => existing.copy( fieldsFn = fields, interfaces = interfaces, astDirectives = existing.astDirectives ++ extensions.flatMap(_.directives), astNodes = existing.astNodes ++ extensions, - instanceCheck = existing.instanceCheck.asInstanceOf[(Any, Class[_], ObjectType[Ctx, _]) ⇒ Boolean])(ClassTag(existing.valClass)) + instanceCheck = existing.instanceCheck.asInstanceOf[(Any, Class[_], ObjectType[Ctx, _]) => Boolean])(ClassTag(existing.valClass)) } def 
buildInputObjectType( origin: MatOrigin, extensions: Vector[ast.InputObjectTypeExtensionDefinition], definition: ast.InputObjectTypeDefinition, - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: AstSchemaMaterializer[Ctx]) = Some(InputObjectType( name = typeName(definition), @@ -426,7 +426,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { origin: MatOrigin, definition: ast.InterfaceTypeDefinition, extensions: List[ast.InterfaceTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]) = { val directives = definition.directives ++ extensions.flatMap(_.directives) @@ -435,7 +435,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { description = typeDescription(definition), fieldsFn = fields, interfaces = Nil, - manualPossibleTypes = () ⇒ Nil, + manualPossibleTypes = () => Nil, astDirectives = directives, astNodes = (definition +: extensions).toVector)) } @@ -444,9 +444,9 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { origin: MatOrigin, existing: InterfaceType[Ctx, _], extensions: List[ast.InterfaceTypeExtensionDefinition], - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: AstSchemaMaterializer[Ctx]) = - existing.copy(fieldsFn = fields, manualPossibleTypes = () ⇒ Nil, interfaces = Nil, + existing.copy(fieldsFn = fields, manualPossibleTypes = () => Nil, interfaces = Nil, astDirectives = existing.astDirectives ++ extensions.flatMap(_.directives), astNodes = existing.astNodes ++ extensions) @@ -541,7 +541,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { tags = fieldTags(typeDefinition, definition), deprecationReason = fieldDeprecationReason(definition), complexity = fieldComplexity(typeDefinition, definition), - manualPossibleTypes = () ⇒ Nil, + manualPossibleTypes = () => Nil, astDirectives = definition.directives, astNodes = Vector(definition))) @@ 
-571,7 +571,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { fieldType = fieldType, arguments = arguments, resolve = extendFieldResolver(origin, typeDefinition, existing, fieldType, mat), - manualPossibleTypes = () ⇒ Nil) + manualPossibleTypes = () => Nil) def extendArgument( origin: MatOrigin, @@ -680,7 +680,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { origin: MatOrigin, extensions: Vector[ast.InputObjectTypeExtensionDefinition], existing: InputObjectType[T], - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: AstSchemaMaterializer[Ctx]) = existing.copy(fieldsFn = fields, astDirectives = existing.astDirectives ++ extensions.flatMap(_.directives), @@ -713,13 +713,13 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { existing: Directive, mat: AstSchemaMaterializer[Ctx]) = existing - def objectTypeInstanceCheck(origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) ⇒ Boolean] = + def objectTypeInstanceCheck(origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) => Boolean] = None - def extendedObjectTypeInstanceCheck(origin: MatOrigin, tpe: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) ⇒ Boolean] = + def extendedObjectTypeInstanceCheck(origin: MatOrigin, tpe: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) => Boolean] = None - def directiveShouldInclude(definition: ast.DirectiveDefinition): DirectiveContext ⇒ Boolean = + def directiveShouldInclude(definition: ast.DirectiveDefinition): DirectiveContext => Boolean = Function.const(true) def argumentFromInput( @@ -733,27 +733,27 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], 
extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], definition: ast.FieldDefinition, - mat: AstSchemaMaterializer[Ctx]): Context[Ctx, _] ⇒ Action[Ctx, _] = - _ ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + mat: AstSchemaMaterializer[Ctx]): Context[Ctx, _] => Action[Ctx, _] = + _ => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException def extendFieldResolver( origin: MatOrigin, typeDefinition: Option[ObjectLikeType[Ctx, _]], existing: Field[Ctx, Any], fieldType: OutputType[_], - mat: AstSchemaMaterializer[Ctx]): Context[Ctx, Any] ⇒ Action[Ctx, _] = existing.resolve + mat: AstSchemaMaterializer[Ctx]): Context[Ctx, Any] => Action[Ctx, _] = existing.resolve def fieldTags(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition): List[FieldTag] = Nil - def scalarCoerceUserInput(definition: ast.ScalarTypeDefinition): Any ⇒ Either[Violation, Any] = - _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + def scalarCoerceUserInput(definition: ast.ScalarTypeDefinition): Any => Either[Violation, Any] = + _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) - def scalarCoerceInput(definition: ast.ScalarTypeDefinition): ast.Value ⇒ Either[Violation, Any] = - _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + def scalarCoerceInput(definition: ast.ScalarTypeDefinition): ast.Value => Either[Violation, Any] = + _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) - def scalarCoerceOutput(definition: ast.ScalarTypeDefinition): (Any, Set[MarshallerCapability]) ⇒ Any = - (_, _) ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + def scalarCoerceOutput(definition: ast.ScalarTypeDefinition): (Any, Set[MarshallerCapability]) => Any = + (_, _) => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException def scalarValueInfo(definition: ast.ScalarTypeDefinition): Set[ScalarValueInfo] = 
Set.empty @@ -761,7 +761,7 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { def scalarComplexity(definition: ast.ScalarTypeDefinition): Double = 0.0D - def fieldComplexity(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition): Option[(Ctx, Args, Double) ⇒ Double] = + def fieldComplexity(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition): Option[(Ctx, Args, Double) => Double] = None def enumValueDeprecationReason(definition: ast.EnumValueDefinition): Option[String] = @@ -771,14 +771,14 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { deprecationReason(definition.directives.toList) def deprecationReason(dirs: List[ast.Directive]): Option[String] = - dirs.find(_.name == DeprecatedDirective.name).flatMap { d ⇒ + dirs.find(_.name == DeprecatedDirective.name).flatMap { d => d.arguments.find(_.name == ReasonArg.name) match { - case Some(reason) ⇒ + case Some(reason) => reason.value match { - case ast.StringValue(value, _, _, _, _) ⇒ Some(value) - case _ ⇒ None + case ast.StringValue(value, _, _, _, _) => Some(value) + case _ => None } - case None ⇒ Some(DefaultDeprecationReason) + case None => Some(DefaultDeprecationReason) } } @@ -833,4 +833,4 @@ class DefaultAstSchemaBuilder[Ctx] extends AstSchemaBuilder[Ctx] { def enumValue(typeDefinition: Either[ast.EnumTypeDefinition, EnumType[_]], definition: ast.EnumValueDefinition): String = definition.name -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/schema/AstSchemaMaterializer.scala b/src/main/scala/sangria/schema/AstSchemaMaterializer.scala index 3dcbaea5..1824c5e6 100644 --- a/src/main/scala/sangria/schema/AstSchemaMaterializer.scala +++ b/src/main/scala/sangria/schema/AstSchemaMaterializer.scala @@ -21,7 +21,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A private val scalarAliasCache = Cache.empty[ScalarAlias[_, _], 
ScalarAlias[_, _]] private lazy val typeDefs: Vector[ast.TypeDefinition] = document.definitions.collect { - case d: ast.TypeDefinition ⇒ d + case d: ast.TypeDefinition => d } private lazy val typeDefsMat: Vector[MaterializedType] = typeDefs.map(MaterializedType(sdlOrigin, _)) @@ -31,35 +31,35 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A private lazy val additionalTypeDefsMap = builder.additionalTypes.groupBy(_.name).mapValues(_.head) private lazy val objectTypeExtensionDefs: Vector[ast.ObjectTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.ObjectTypeExtensionDefinition ⇒ d + case d: ast.ObjectTypeExtensionDefinition => d } private lazy val interfaceTypeExtensionDefs: Vector[ast.InterfaceTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.InterfaceTypeExtensionDefinition ⇒ d + case d: ast.InterfaceTypeExtensionDefinition => d } private lazy val inputObjectTypeExtensionDefs: Vector[ast.InputObjectTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.InputObjectTypeExtensionDefinition ⇒ d + case d: ast.InputObjectTypeExtensionDefinition => d } private lazy val unionTypeExtensionDefs: Vector[ast.UnionTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.UnionTypeExtensionDefinition ⇒ d + case d: ast.UnionTypeExtensionDefinition => d } private lazy val enumTypeExtensionDefs: Vector[ast.EnumTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.EnumTypeExtensionDefinition ⇒ d + case d: ast.EnumTypeExtensionDefinition => d } private lazy val scalarTypeExtensionDefs: Vector[ast.ScalarTypeExtensionDefinition] = allDefinitions.collect { - case d: ast.ScalarTypeExtensionDefinition ⇒ d + case d: ast.ScalarTypeExtensionDefinition => d } private lazy val schemaExtensionDefs: Vector[ast.SchemaExtensionDefinition] = allDefinitions.collect { - case d: ast.SchemaExtensionDefinition ⇒ d + case d: ast.SchemaExtensionDefinition => d } private lazy val directiveDefs: 
Vector[ast.DirectiveDefinition] = allDefinitions.collect { - case d: ast.DirectiveDefinition ⇒ d + case d: ast.DirectiveDefinition => d } private lazy val directiveDefsMap = directiveDefs.groupBy(_.name) @@ -91,8 +91,8 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val queryType = getTypeFromDef(existingOrigin, schema.query) AstSchemaMaterializer.findOperationsTypes(schemaExtensionDefs.flatMap(_.operationTypes), document.sourceMapper, true, schema.mutation.isDefined, schema.subscription.isDefined) match { - case Left(errors) ⇒ throw MaterializedSchemaValidationError(errors) - case Right((_, mutationExt, subscriptionExt)) ⇒ + case Left(errors) => throw MaterializedSchemaValidationError(errors) + case Right((_, mutationExt, subscriptionExt)) => val mutationType = mutationExt.map(getObjectType(sdlOrigin, _).asInstanceOf[ObjectType[Ctx, Val]]) orElse schema.mutation map (getTypeFromDef(existingOrigin, _)) @@ -120,13 +120,13 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val defErrors = validateDefinitions extractSchemaInfo(document, typeDefs, schemaExtensionDefs) match { - case Left(schemaErrors) ⇒ throw MaterializedSchemaValidationError(schemaErrors ++ defErrors) - case _ if defErrors.nonEmpty ⇒ throw MaterializedSchemaValidationError(defErrors) - case Right(schemaInfo) ⇒ + case Left(schemaErrors) => throw MaterializedSchemaValidationError(schemaErrors ++ defErrors) + case _ if defErrors.nonEmpty => throw MaterializedSchemaValidationError(defErrors) + case Right(schemaInfo) => val queryType = getObjectType(sdlOrigin, schemaInfo.query) val mutationType = schemaInfo.mutation map (getObjectType(sdlOrigin, _)) val subscriptionType = schemaInfo.subscription map (getObjectType(sdlOrigin, _)) - val directives = directiveDefs filterNot (d ⇒ Schema.isBuiltInDirective(d.name)) flatMap (buildDirective(sdlOrigin, _)) + val directives = directiveDefs filterNot (d => Schema.isBuiltInDirective(d.name)) flatMap 
(buildDirective(sdlOrigin, _)) builder.buildSchema( schemaInfo.definition, @@ -145,7 +145,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A if (defErrors.nonEmpty) throw MaterializedSchemaValidationError(defErrors) - val directives = directiveDefs filterNot (d ⇒ Schema.isBuiltInDirective(d.name)) flatMap (buildDirective(sdlOrigin, _)) + val directives = directiveDefs filterNot (d => Schema.isBuiltInDirective(d.name)) flatMap (buildDirective(sdlOrigin, _)) val unused = findUnusedTypes() unused._1.toVector.map(getNamedType(sdlOrigin, _, None)) ++ unused._2 ++ directives @@ -154,15 +154,15 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A def validateExtensions(schema: Schema[Ctx, _]): Vector[Violation] = { val nestedErrors = Vector( typeDefsMap.toVector collect { - case (name, defs) if defs.size > 1 ⇒ + case (name, defs) if defs.size > 1 => NonUniqueTypeDefinitionViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) - case (name, defs) if schema.allTypes contains name ⇒ + case (name, defs) if schema.allTypes contains name => ExistingTypeViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) }, directiveDefsMap.toVector collect { - case (name, defs) if defs.size > 1 ⇒ + case (name, defs) if defs.size > 1 => NonUniqueDirectiveDefinitionViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) - case (name, defs) if schema.directivesByName contains name ⇒ + case (name, defs) if schema.directivesByName contains name => NonUniqueDirectiveDefinitionViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) }, objectTypeExtensionDefs flatMap (validateExtensions[ObjectType[_, _], ast.ObjectTypeDefinition](schema, _, "object")), @@ -180,23 +180,23 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val astClass = implicitly[ClassTag[T2]].runtimeClass typeDefsMap.get(ext.name).map(_.head) match { - case Some(tpe) if 
astClass.isAssignableFrom(tpe.getClass) ⇒ None - case Some(tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) - case None ⇒ + case Some(tpe) if astClass.isAssignableFrom(tpe.getClass) => None + case Some(tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case None => schema.allTypes.get(ext.name) match { - case Some(tpe) if instClass.isAssignableFrom(tpe.getClass) ⇒ None - case Some(tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) - case None ⇒ validateExtensionsAdditional(instClass, astClass, ext, typeKind) + case Some(tpe) if instClass.isAssignableFrom(tpe.getClass) => None + case Some(tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case None => validateExtensionsAdditional(instClass, astClass, ext, typeKind) } } } def validateDefinitions: Vector[Violation] = { val nestedErrors = Vector ( - typeDefsMap.find(_._2.size > 1).toVector.map { case (name, defs) ⇒ + typeDefsMap.find(_._2.size > 1).toVector.map { case (name, defs) => NonUniqueTypeDefinitionViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) }, - directiveDefsMap.find(_._2.size > 1).toVector.map { case (name, defs) ⇒ + directiveDefsMap.find(_._2.size > 1).toVector.map { case (name, defs) => NonUniqueDirectiveDefinitionViolation(name, document.sourceMapper, defs.flatMap(_.location).toList) }, objectTypeExtensionDefs flatMap (validateExtensionsAst[ObjectType[_, _], ast.ObjectTypeDefinition](_, "object")), @@ -214,34 +214,34 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val astClass = implicitly[ClassTag[T2]].runtimeClass typeDefsMap.get(ext.name).map(_.head) match { - case Some(tpe) if astClass.isAssignableFrom(tpe.getClass) ⇒ None - case Some(tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, 
tpe.name, document.sourceMapper, ext.location.toList)) - case None ⇒ validateExtensionsAdditional(instClass, astClass, ext, typeKind) + case Some(tpe) if astClass.isAssignableFrom(tpe.getClass) => None + case Some(tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case None => validateExtensionsAdditional(instClass, astClass, ext, typeKind) } } private def validateExtensionsAdditional(instClass: Class[_], astClass: Class[_], ext: ast.TypeExtensionDefinition, typeKind: String) = { additionalTypeDefsMap.get(ext.name) match { - case Some(t) ⇒ t match { - case BuiltMaterializedTypeInst(_, tpe) if instClass.isAssignableFrom(tpe.getClass) ⇒ None - case BuiltMaterializedTypeInst(_, tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) - case MaterializedTypeInst(_, tpe) if instClass.isAssignableFrom(tpe.getClass) ⇒ None - case MaterializedTypeInst(_, tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) - case MaterializedTypeAst(_, tpe) if astClass.isAssignableFrom(tpe.getClass) ⇒ None - case MaterializedTypeAst(_, tpe) ⇒ Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case Some(t) => t match { + case BuiltMaterializedTypeInst(_, tpe) if instClass.isAssignableFrom(tpe.getClass) => None + case BuiltMaterializedTypeInst(_, tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case MaterializedTypeInst(_, tpe) if instClass.isAssignableFrom(tpe.getClass) => None + case MaterializedTypeInst(_, tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, document.sourceMapper, ext.location.toList)) + case MaterializedTypeAst(_, tpe) if astClass.isAssignableFrom(tpe.getClass) => None + case MaterializedTypeAst(_, tpe) => Some(TypeExtensionOnWrongKindViolation(typeKind, tpe.name, 
document.sourceMapper, ext.location.toList)) } - case None ⇒ Some(TypeExtensionOnNonExistingTypeViolation(ext.name, document.sourceMapper, ext.location.toList)) + case None => Some(TypeExtensionOnNonExistingTypeViolation(ext.name, document.sourceMapper, ext.location.toList)) } } def findUnusedTypes(): (Set[String], Vector[Type with Named]) = { resolveAllLazyFields() - val referenced = typeDefCache.mapToSet((_, v) ⇒ v.name) - val notReferenced = typeDefs.filterNot(tpe ⇒ Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) - val notReferencedAdd = builder.additionalTypes.filterNot(tpe ⇒ Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) + val referenced = typeDefCache.mapToSet((_, v) => v.name) + val notReferenced = typeDefs.filterNot(tpe => Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) + val notReferencedAdd = builder.additionalTypes.filterNot(tpe => Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) - referenced → (notReferenced.map(tpe ⇒ getNamedType(sdlOrigin, tpe.name, tpe.location)) ++ notReferencedAdd.map(tpe ⇒ getNamedType(tpe.origin, tpe.name, tpe.location))) + referenced -> (notReferenced.map(tpe => getNamedType(sdlOrigin, tpe.name, tpe.location)) ++ notReferencedAdd.map(tpe => getNamedType(tpe.origin, tpe.name, tpe.location))) } def findUnusedTypes(schema: Schema[_, _], referenced: Set[String]): Vector[Type with Named] = { @@ -249,9 +249,9 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A resolveAllLazyFields() - val notReferenced = schema.typeList.filterNot(tpe ⇒ Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) + val notReferenced = schema.typeList.filterNot(tpe => Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) - notReferenced map (tpe ⇒ getTypeFromDef(existingOrigin, tpe)) + notReferenced map (tpe => getTypeFromDef(existingOrigin, tpe)) } // TODO: think about better solution @@ -265,9 +265,9 @@ class AstSchemaMaterializer[Ctx] 
private (val document: ast.Document, builder: A iteration += 1 typeDefCache.forEachValue { - case o: ObjectLikeType[_, _] ⇒ o.fields - case o: InputObjectType[_] ⇒ o.fields - case _ ⇒ // do nothing + case o: ObjectLikeType[_, _] => o.fields + case o: InputObjectType[_] => o.fields + case _ => // do nothing } newCount = typeDefCache.size @@ -275,24 +275,24 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A } def getTypeFromExistingType(origin: MatOrigin, tpe: OutputType[_]): OutputType[Any] = tpe match { - case ListType(ofType) ⇒ ListType(getTypeFromExistingType(origin, ofType)) - case OptionType(ofType) ⇒ OptionType(getTypeFromExistingType(origin, ofType)) - case t: Named ⇒ getTypeFromDef(origin, t) + case ListType(ofType) => ListType(getTypeFromExistingType(origin, ofType)) + case OptionType(ofType) => OptionType(getTypeFromExistingType(origin, ofType)) + case t: Named => getTypeFromDef(origin, t) } def getInputTypeFromExistingType(origin: MatOrigin, tpe: InputType[_]): InputType[Any] = tpe match { - case ListInputType(ofType) ⇒ ListInputType(getInputTypeFromExistingType(origin, ofType)) - case OptionInputType(ofType) ⇒ OptionInputType(getInputTypeFromExistingType(origin, ofType)) - case t: Named ⇒ getTypeFromDef(origin, t) + case ListInputType(ofType) => ListInputType(getInputTypeFromExistingType(origin, ofType)) + case OptionInputType(ofType) => OptionInputType(getInputTypeFromExistingType(origin, ofType)) + case t: Named => getTypeFromDef(origin, t) } def getTypeFromDef[T <: Type with Named](origin: MatOrigin, tpe: T): T = tpe match { - case alias: ScalarAlias[Any, Any] @unchecked ⇒ + case alias: ScalarAlias[Any, Any] @unchecked => scalarAliasCache.getOrElseUpdate(alias, { extendScalarAlias(origin, alias) }).asInstanceOf[T] - case _ ⇒ + case _ => getNamedType(origin, tpe.name, None).asInstanceOf[T] } @@ -308,50 +308,50 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A def getObjectType(origin: 
MatOrigin, tpe: ast.NamedType): ObjectType[Ctx, Any] = getOutputType(origin, tpe, optional = false) match { - case obj: ObjectType[_, _] ⇒ obj.asInstanceOf[ObjectType[Ctx, Any]] - case _ ⇒ throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("object", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) + case obj: ObjectType[_, _] => obj.asInstanceOf[ObjectType[Ctx, Any]] + case _ => throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("object", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) } def getScalarType(origin: MatOrigin, tpe: ast.NamedType): ScalarType[Any] = getOutputType(origin, tpe, optional = false) match { - case obj: ScalarType[_] ⇒ obj.asInstanceOf[ScalarType[Any]] - case _ ⇒ throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("scalar", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) + case obj: ScalarType[_] => obj.asInstanceOf[ScalarType[Any]] + case _ => throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("scalar", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) } def getInterfaceType(origin: MatOrigin, tpe: ast.NamedType): InterfaceType[Ctx, Any] = getOutputType(origin, tpe, optional = false) match { - case obj: InterfaceType[_, _] ⇒ obj.asInstanceOf[InterfaceType[Ctx, Any]] - case _ ⇒ throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("interface", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) + case obj: InterfaceType[_, _] => obj.asInstanceOf[InterfaceType[Ctx, Any]] + case _ => throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("interface", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) } def getInputType(origin: MatOrigin, tpe: ast.Type, replacementNamedType: Option[InputType[_]] = None, optional: Boolean = true): InputType[_] = tpe match { - case ast.ListType(ofType, _) 
if optional ⇒ OptionInputType(ListInputType(getInputType(origin, ofType, replacementNamedType, true))) - case ast.ListType(ofType, _) ⇒ ListInputType(getInputType(origin, ofType, replacementNamedType, true)) - case ast.NotNullType(ofType, _) ⇒ getInputType(origin, ofType, replacementNamedType, false) - case ast.NamedType(name, _) ⇒ + case ast.ListType(ofType, _) if optional => OptionInputType(ListInputType(getInputType(origin, ofType, replacementNamedType, true))) + case ast.ListType(ofType, _) => ListInputType(getInputType(origin, ofType, replacementNamedType, true)) + case ast.NotNullType(ofType, _) => getInputType(origin, ofType, replacementNamedType, false) + case ast.NamedType(name, _) => replacementNamedType getOrElse getNamedType(origin, name, tpe.location) match { - case input: InputType[_] if optional ⇒ OptionInputType(input) - case input: InputType[_] ⇒ input - case _ ⇒ throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("input type", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) + case input: InputType[_] if optional => OptionInputType(input) + case input: InputType[_] => input + case _ => throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("input type", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) } } def getOutputType(origin: MatOrigin, tpe: ast.Type, replacementNamedType: Option[OutputType[_]] = None, optional: Boolean = true): OutputType[_] = tpe match { - case ast.ListType(ofType, _) if optional ⇒ OptionType(ListType(getOutputType(origin, ofType, replacementNamedType, true))) - case ast.ListType(ofType, _) ⇒ ListType(getOutputType(origin, ofType, replacementNamedType, true)) - case ast.NotNullType(ofType, _) ⇒ getOutputType(origin, ofType, replacementNamedType, false) - case ast.NamedType(name, _) ⇒ + case ast.ListType(ofType, _) if optional => OptionType(ListType(getOutputType(origin, ofType, replacementNamedType, true))) + case ast.ListType(ofType, _) 
=> ListType(getOutputType(origin, ofType, replacementNamedType, true)) + case ast.NotNullType(ofType, _) => getOutputType(origin, ofType, replacementNamedType, false) + case ast.NamedType(name, _) => replacementNamedType getOrElse getNamedType(origin, name, tpe.location) match { - case out: OutputType[_] if optional ⇒ OptionType(out) - case out: OutputType[_] ⇒ out - case _ ⇒ throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("output type", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) + case out: OutputType[_] if optional => OptionType(out) + case out: OutputType[_] => out + case _ => throw MaterializedSchemaValidationError(Vector(InvalidTypeUsageViolation("output type", QueryRenderer.render(tpe), document.sourceMapper, tpe.location.toList))) } } def getNamedType(origin: MatOrigin, typeName: String, location: Option[AstLocation]): Type with Named = - typeDefCache.getOrElseUpdate(origin → typeName, Schema.getBuiltInType(typeName) getOrElse { + typeDefCache.getOrElseUpdate(origin -> typeName, Schema.getBuiltInType(typeName) getOrElse { val existing = existingDefsMat.get(typeName).toVector val sdl = typeDefsMat.filter(_.name == typeName) val additional = builder.additionalTypes.filter(_.name == typeName).toVector @@ -363,7 +363,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val resolved = builder.resolveNameConflict( origin, allCandidates ++ - typeDefCache.find((_, v) ⇒ v.name == typeName).map{case ((o, _), v) ⇒ BuiltMaterializedTypeInst(o, v)}.toVector) + typeDefCache.find((_, v) => v.name == typeName).map{case ((o, _), v) => BuiltMaterializedTypeInst(o, v)}.toVector) if (!resolved.isInstanceOf[BuiltMaterializedTypeInst] && typeDefCache.keyExists(_._2 == resolved.name)) throw SchemaMaterializationException("Name conflict resolution produced already existing type name") @@ -378,28 +378,28 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A def 
getNamedType(origin: MatOrigin, tpe: MaterializedType): Option[Type with Named] = tpe match { - case BuiltMaterializedTypeInst(o, t) ⇒ Some(t) - case MaterializedTypeInst(o, t) ⇒ Some(extendType(o, t)) - case MaterializedTypeAst(o, t) ⇒ buildType(o, t) + case BuiltMaterializedTypeInst(o, t) => Some(t) + case MaterializedTypeInst(o, t) => Some(extendType(o, t)) + case MaterializedTypeAst(o, t) => buildType(o, t) } def buildType(origin: MatOrigin, definition: TypeDefinition): Option[Type with Named] = definition match { - case d: ast.ObjectTypeDefinition ⇒ buildObjectDef(origin, d) - case d: ast.InterfaceTypeDefinition ⇒ buildInterfaceDef(origin, d) - case d: ast.UnionTypeDefinition ⇒ buildUnionDef(origin, d) - case d: ast.InputObjectTypeDefinition ⇒ buildInputObjectDef(origin, d) - case d: ast.ScalarTypeDefinition ⇒ buildScalarDef(origin, d) - case d: ast.EnumTypeDefinition ⇒ buildEnumDef(origin, d) + case d: ast.ObjectTypeDefinition => buildObjectDef(origin, d) + case d: ast.InterfaceTypeDefinition => buildInterfaceDef(origin, d) + case d: ast.UnionTypeDefinition => buildUnionDef(origin, d) + case d: ast.InputObjectTypeDefinition => buildInputObjectDef(origin, d) + case d: ast.ScalarTypeDefinition => buildScalarDef(origin, d) + case d: ast.EnumTypeDefinition => buildEnumDef(origin, d) } def extendType(origin: MatOrigin, existingType: Type with Named): Type with Named = existingType match { - case tpe: ScalarType[_] ⇒ builder.transformScalarType(origin, findScalarExtensions(tpe.name), tpe, this) - case tpe: ScalarAlias[_, _] ⇒ extendScalarAlias(origin, tpe.asInstanceOf[ScalarAlias[Any, Any]]) - case tpe: EnumType[_] ⇒ extendEnumType(origin, tpe) - case tpe: InputObjectType[_] ⇒ extendInputObjectType(origin, tpe) - case tpe: UnionType[Ctx] ⇒ extendUnionType(origin, tpe) - case tpe: ObjectType[Ctx, _] ⇒ extendObjectType(origin, tpe) - case tpe: InterfaceType[Ctx, _] ⇒ extendInterfaceType(origin, tpe) + case tpe: ScalarType[_] => builder.transformScalarType(origin, 
findScalarExtensions(tpe.name), tpe, this) + case tpe: ScalarAlias[_, _] => extendScalarAlias(origin, tpe.asInstanceOf[ScalarAlias[Any, Any]]) + case tpe: EnumType[_] => extendEnumType(origin, tpe) + case tpe: InputObjectType[_] => extendInputObjectType(origin, tpe) + case tpe: UnionType[Ctx] => extendUnionType(origin, tpe) + case tpe: ObjectType[Ctx, _] => extendObjectType(origin, tpe) + case tpe: InterfaceType[Ctx, _] => extendInterfaceType(origin, tpe) } def buildField(origin: MatOrigin, typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], field: ast.FieldDefinition) = { @@ -434,7 +434,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A origin, tpe, extensions.toList, - () ⇒ buildFields(origin, tpe, tpe.fields, extensions).toList, + () => buildFields(origin, tpe, tpe.fields, extensions).toList, buildInterfaces(origin, tpe, tpe.interfaces, extensions).toList, this) } @@ -446,7 +446,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A origin, tpe, extensions.toList, - () ⇒ extendFields(origin, tpe, extensions), + () => extendFields(origin, tpe, extensions), extendInterfaces(origin, tpe, extensions), this) } @@ -454,7 +454,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A def buildInterfaceDef(origin: MatOrigin, tpe: ast.InterfaceTypeDefinition) = { val extensions = findInterfaceExtensions(tpe.name) - builder.buildInterfaceType(origin, tpe, extensions.toList, () ⇒ buildFields(origin, tpe, tpe.fields, extensions).toList, this) + builder.buildInterfaceType(origin, tpe, extensions.toList, () => buildFields(origin, tpe, tpe.fields, extensions).toList, this) } def extendEnumType(origin: MatOrigin, tpe: EnumType[_]) = { @@ -475,7 +475,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val extensions = findInputObjectExtensions(tpe.name) val extraFields = 
extensions.flatMap(_.fields) - val fieldsFn = () ⇒ { + val fieldsFn = () => { val ef = extraFields flatMap (buildInputField(origin, Right(tpe), _, extensions)) toList val f = tpe.fields map (extendInputField(origin, tpe, _)) @@ -488,7 +488,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A def extendInterfaceType(origin: MatOrigin, tpe: InterfaceType[Ctx, _]) = { val extensions = findInterfaceExtensions(tpe.name) - builder.extendInterfaceType(origin, tpe, extensions.toList, () ⇒ extendFields(origin, tpe, extensions), this) + builder.extendInterfaceType(origin, tpe, extensions.toList, () => extendFields(origin, tpe, extensions), this) } def buildInterfaces(origin: MatOrigin, tpe: ast.ObjectTypeDefinition, interfaces: Vector[ast.NamedType], extensions: Vector[ast.ObjectTypeExtensionDefinition]) = { @@ -513,17 +513,17 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val withExtensions = fieldDefs ++ extraFields val addFields = builder.buildAdditionalFields(origin, extensions, tpe, this).flatMap { - case MaterializedFieldAst(o, ast) ⇒ buildField(o, Left(tpe), extensions, ast) - case MaterializedFieldInst(o, definition) ⇒ Some(extendField(o, None, definition)) + case MaterializedFieldAst(o, ast) => buildField(o, Left(tpe), extensions, ast) + case MaterializedFieldInst(o, definition) => Some(extendField(o, None, definition)) } withExtensions.flatMap(buildField(origin, Left(tpe), extensions, _)) ++ addFields } def extendFields(origin: MatOrigin, tpe: ObjectLikeType[Ctx, _], extensions: Vector[ast.ObjectLikeTypeExtensionDefinition]) = { - val extraFields = extensions.flatMap(e ⇒ e.fields map (e → _)) + val extraFields = extensions.flatMap(e => e.fields map (e -> _)) - val ef = extraFields flatMap (f ⇒ buildField(origin, Right(tpe), extensions, f._2)) + val ef = extraFields flatMap (f => buildField(origin, Right(tpe), extensions, f._2)) val of = tpe.uniqueFields.toList map (extendField(origin, Some(tpe), 
_)) of ++ ef @@ -576,7 +576,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A val extraFields = extensions.flatMap(_.fields) val withExtensions = tpe.fields ++ extraFields - builder.buildInputObjectType(origin, extensions, tpe, () ⇒ withExtensions flatMap (buildInputField(origin, Left(tpe), _, extensions)) toList, this) + builder.buildInputObjectType(origin, extensions, tpe, () => withExtensions flatMap (buildInputField(origin, Left(tpe), _, extensions)) toList, this) } def buildScalarDef(origin: MatOrigin, tpe: ast.ScalarTypeDefinition) = { @@ -597,7 +597,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A builder.buildEnumValue(origin, extensions, typeDef, value, this) def buildDefault(defaultValue: Option[ast.Value]) = - defaultValue map (dv ⇒ dv → sangria.marshalling.queryAst.queryAstToInput) + defaultValue map (dv => dv -> sangria.marshalling.queryAst.queryAstToInput) def buildArgument(origin: MatOrigin, typeDefinition: Either[ast.TypeSystemDefinition, ObjectLikeType[Ctx, _]], fieldDef: Option[ast.FieldDefinition], value: ast.InputValueDefinition) = { val default = buildDefault(value.defaultValue) @@ -617,7 +617,7 @@ class AstSchemaMaterializer[Ctx] private (val document: ast.Document, builder: A try { DirectiveLocation.fromString(loc.name) } catch { - case e: MatchError ⇒ throw SchemaMaterializationException(s"Unknown directive location '${loc.name}'.") + case e: MatchError => throw SchemaMaterializationException(s"Unknown directive location '${loc.name}'.") } } @@ -625,7 +625,7 @@ object AstSchemaMaterializer { case class SchemaInfo(query: ast.NamedType, mutation: Option[ast.NamedType], subscription: Option[ast.NamedType], definition: Option[ast.SchemaDefinition]) def extractSchemaInfo(document: ast.Document, typeDefs: Vector[ast.TypeDefinition], extensions: Vector[ast.SchemaExtensionDefinition]): Either[Vector[Violation], SchemaInfo] = { - val schemas = document.definitions.collect {case 
s: ast.SchemaDefinition ⇒ s} + val schemas = document.definitions.collect {case s: ast.SchemaDefinition => s} val schemaErrors = if (schemas.size > 1) @@ -634,40 +634,40 @@ object AstSchemaMaterializer { if (schemas.nonEmpty) { val validatedInfo = - schemas.map { schema ⇒ + schemas.map { schema => val allOperationTypes = schema.operationTypes ++ extensions.flatMap(_.operationTypes) findOperationsTypes(allOperationTypes, document.sourceMapper, false, false, false) - .right.map {case (query, mutation, subscription) ⇒ SchemaInfo(query.get, mutation, subscription, Some(schema))} + .right.map {case (query, mutation, subscription) => SchemaInfo(query.get, mutation, subscription, Some(schema))} } - val typeErrors = validatedInfo.collect{case Left(errors) ⇒ errors}.flatten + val typeErrors = validatedInfo.collect{case Left(errors) => errors}.flatten if (schemaErrors.nonEmpty || typeErrors.nonEmpty) Left(schemaErrors ++ typeErrors) else validatedInfo.head } else { typeDefs.find(_.name == "Query") match { - case None ⇒ + case None => Left(schemaErrors :+ NoQueryTypeViolation(document.sourceMapper, document.location.toList)) - case Some(_) if schemaErrors.nonEmpty ⇒ + case Some(_) if schemaErrors.nonEmpty => Left(schemaErrors) - case Some(query) ⇒ - val mutation = typeDefs.find(_.name == "Mutation") map (t ⇒ ast.NamedType(t.name)) - val subscription = typeDefs.find(_.name == "Subscription") map (t ⇒ ast.NamedType(t.name)) + case Some(query) => + val mutation = typeDefs.find(_.name == "Mutation") map (t => ast.NamedType(t.name)) + val subscription = typeDefs.find(_.name == "Subscription") map (t => ast.NamedType(t.name)) findOperationsTypes(extensions.flatMap(_.operationTypes), document.sourceMapper, true, mutation.isDefined, subscription.isDefined).right.map { - case (_, mutationExt, subscriptionExt) ⇒ SchemaInfo(ast.NamedType(query.name), mutationExt orElse mutation, subscriptionExt orElse subscription, None) + case (_, mutationExt, subscriptionExt) => 
SchemaInfo(ast.NamedType(query.name), mutationExt orElse mutation, subscriptionExt orElse subscription, None) } } } } def findOperationsTypes(allOperationTypes: Vector[ast.OperationTypeDefinition], sourceMapper: Option[SourceMapper], queryAlreadyExists: Boolean, mutationAlreadyExists: Boolean, subscriptionAlreadyExists: Boolean): Either[Vector[Violation], (Option[ast.NamedType], Option[ast.NamedType], Option[ast.NamedType])] = { - val queries = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Query, tpe, _, _) ⇒ tpe} - val mutations = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Mutation, tpe, _, _) ⇒ tpe} - val subscriptions = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Subscription, tpe, _, _) ⇒ tpe} + val queries = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Query, tpe, _, _) => tpe} + val mutations = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Mutation, tpe, _, _) => tpe} + val subscriptions = allOperationTypes.collect {case ast.OperationTypeDefinition(OperationType.Subscription, tpe, _, _) => tpe} val qErrors = if ((!queryAlreadyExists && queries.size != 1) || (queryAlreadyExists && queries.nonEmpty)) diff --git a/src/main/scala/sangria/schema/AstSchemaResolver.scala b/src/main/scala/sangria/schema/AstSchemaResolver.scala index f5afeb31..741e9a54 100644 --- a/src/main/scala/sangria/schema/AstSchemaResolver.scala +++ b/src/main/scala/sangria/schema/AstSchemaResolver.scala @@ -33,24 +33,24 @@ object LegacyCommentDescriptionsResolver { case class DirectiveResolver[Ctx]( directive: Directive, - resolve: AstDirectiveContext[Ctx] ⇒ Action[Ctx, Any], - complexity: Option[ComplexityDirectiveContext[Ctx] ⇒ (Ctx, Args, Double) ⇒ Double] = None) extends AstSchemaResolver[Ctx] + resolve: AstDirectiveContext[Ctx] => Action[Ctx, Any], + complexity: Option[ComplexityDirectiveContext[Ctx] => (Ctx, Args, Double) => Double] = None) 
extends AstSchemaResolver[Ctx] case class DirectiveFieldProvider[Ctx]( directive: Directive, - resolve: DirectiveFieldProviderContext[Ctx] ⇒ List[MaterializedField[Ctx, _]]) extends AstSchemaResolver[Ctx] + resolve: DirectiveFieldProviderContext[Ctx] => List[MaterializedField[Ctx, _]]) extends AstSchemaResolver[Ctx] case class DynamicDirectiveFieldProvider[Ctx, A]( directiveName: String, - resolve: DynamicDirectiveFieldProviderContext[Ctx, A] ⇒ List[MaterializedField[Ctx, _]])(implicit val marshaller: ResultMarshallerForType[A]) extends AstSchemaResolver[Ctx] + resolve: DynamicDirectiveFieldProviderContext[Ctx, A] => List[MaterializedField[Ctx, _]])(implicit val marshaller: ResultMarshallerForType[A]) extends AstSchemaResolver[Ctx] case class DirectiveInputTypeResolver[Ctx]( directive: Directive, - resolve: AstDirectiveInputTypeContext[Ctx] ⇒ InputType[Any]) extends AstSchemaResolver[Ctx] + resolve: AstDirectiveInputTypeContext[Ctx] => InputType[Any]) extends AstSchemaResolver[Ctx] case class DirectiveOutputTypeResolver[Ctx]( directive: Directive, - resolve: AstDirectiveOutputTypeContext[Ctx] ⇒ OutputType[Any]) extends AstSchemaResolver[Ctx] + resolve: AstDirectiveOutputTypeContext[Ctx] => OutputType[Any]) extends AstSchemaResolver[Ctx] case class InputTypeResolver[Ctx]( resolve: PartialFunction[AstInputTypeContext[Ctx], InputType[Any]]) extends AstSchemaResolver[Ctx] @@ -60,7 +60,7 @@ case class OutputTypeResolver[Ctx]( case class DirectiveScalarResolver[Ctx]( directive: Directive, - resolve: AstDirectiveScalarContext ⇒ ScalarType[_]) extends AstSchemaResolver[Ctx] + resolve: AstDirectiveScalarContext => ScalarType[_]) extends AstSchemaResolver[Ctx] case class SimpleEnumValueResolver[Ctx]( resolve: PartialFunction[(Either[ast.EnumTypeDefinition, EnumType[_]], ast.EnumValueDefinition), String]) extends AstSchemaResolver[Ctx] @@ -75,19 +75,19 @@ case class ScalarResolver[Ctx](resolve: PartialFunction[ast.ScalarTypeDefinition case class DynamicDirectiveResolver[Ctx, 
T]( directiveName: String, - resolve: DynamicDirectiveContext[Ctx, T] ⇒ Action[Ctx, Any], - complexity: Option[ComplexityDynamicDirectiveContext[Ctx, T] ⇒ (Ctx, Args, Double) ⇒ Double] = None)(implicit val marshaller: ResultMarshallerForType[T]) extends AstSchemaResolver[Ctx] + resolve: DynamicDirectiveContext[Ctx, T] => Action[Ctx, Any], + complexity: Option[ComplexityDynamicDirectiveContext[Ctx, T] => (Ctx, Args, Double) => Double] = None)(implicit val marshaller: ResultMarshallerForType[T]) extends AstSchemaResolver[Ctx] case class FieldResolver[Ctx]( - resolve: PartialFunction[(Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], ast.FieldDefinition), Context[Ctx, _] ⇒ Action[Ctx, Any]], - complexity: PartialFunction[(Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], ast.FieldDefinition), (Ctx, Args, Double) ⇒ Double] = PartialFunction.empty) extends AstSchemaResolver[Ctx] + resolve: PartialFunction[(Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], ast.FieldDefinition), Context[Ctx, _] => Action[Ctx, Any]], + complexity: PartialFunction[(Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], ast.FieldDefinition), (Ctx, Args, Double) => Double] = PartialFunction.empty) extends AstSchemaResolver[Ctx] object FieldResolver { - def map[Ctx](config: (String, Map[String, Context[Ctx, _] ⇒ Action[Ctx, Any]])*): FieldResolver[Ctx] = { + def map[Ctx](config: (String, Map[String, Context[Ctx, _] => Action[Ctx, Any]])*): FieldResolver[Ctx] = { val configMap = config.toMap FieldResolver { - case (TypeName(name), field) if configMap.contains(name) && configMap(name).contains(field.name) ⇒ + case (TypeName(name), field) if configMap.contains(name) && configMap(name).contains(field.name) => configMap(name)(field.name) } } @@ -97,14 +97,14 @@ object FieldResolver { } case class ExistingFieldResolver[Ctx]( - resolve: PartialFunction[(MatOrigin, Option[ObjectLikeType[Ctx, _]], Field[Ctx, _]), Context[Ctx, _] ⇒ Action[Ctx, Any]]) extends AstSchemaResolver[Ctx] + resolve: 
PartialFunction[(MatOrigin, Option[ObjectLikeType[Ctx, _]], Field[Ctx, _]), Context[Ctx, _] => Action[Ctx, Any]]) extends AstSchemaResolver[Ctx] object ExistingFieldResolver { - def map[Ctx](config: (String, Map[String, Context[Ctx, _] ⇒ Action[Ctx, Any]])*): ExistingFieldResolver[Ctx] = { + def map[Ctx](config: (String, Map[String, Context[Ctx, _] => Action[Ctx, Any]])*): ExistingFieldResolver[Ctx] = { val configMap = config.toMap ExistingFieldResolver { - case (_, tpe, field) if tpe.isDefined && configMap.contains(tpe.get.name) && configMap(tpe.get.name).contains(field.name) ⇒ + case (_, tpe, field) if tpe.isDefined && configMap.contains(tpe.get.name) && configMap(tpe.get.name).contains(field.name) => configMap(tpe.get.name)(field.name) } } @@ -114,7 +114,7 @@ object ExistingFieldResolver { } case class AnyFieldResolver[Ctx]( - resolve: PartialFunction[MatOrigin, Context[Ctx, _] ⇒ Action[Ctx, Any]]) extends AstSchemaResolver[Ctx] + resolve: PartialFunction[MatOrigin, Context[Ctx, _] => Action[Ctx, Any]]) extends AstSchemaResolver[Ctx] object AnyFieldResolver { def defaultInput[Ctx, In : InputUnmarshaller] = @@ -122,11 +122,11 @@ object AnyFieldResolver { } case class InstanceCheck[Ctx]( - fn: InstanceCheckContext[Ctx] ⇒ (Any, Class[_]) ⇒ Boolean) extends AstSchemaResolver[Ctx] + fn: InstanceCheckContext[Ctx] => (Any, Class[_]) => Boolean) extends AstSchemaResolver[Ctx] object InstanceCheck { - def simple[Ctx](fn: Any ⇒ String): InstanceCheck[Ctx] = - InstanceCheck(c ⇒ (value, _) ⇒ fn(value) == c.definition.name) + def simple[Ctx](fn: Any => String): InstanceCheck[Ctx] = + InstanceCheck(c => (value, _) => fn(value) == c.definition.name) def field[Ctx, T : InputUnmarshaller]: InstanceCheck[Ctx] = field[Ctx, T]("type") @@ -134,22 +134,22 @@ object InstanceCheck { def field[Ctx, T : InputUnmarshaller](fieldName: String): InstanceCheck[Ctx] = { val iu = implicitly[InputUnmarshaller[T]] - InstanceCheck(c ⇒ (value, _) ⇒ { + InstanceCheck(c => (value, _) => { val node = 
value.asInstanceOf[T] if (!iu.isMapNode(node)) false else iu.getMapValue(node, fieldName) match { - case Some(v) ⇒ iu.isScalarNode(v) && iu.getScalaScalarValue(v) == c.definition.name - case None ⇒ false + case Some(v) => iu.isScalarNode(v) && iu.getScalaScalarValue(v) == c.definition.name + case None => false } }) } } case class ExistingInstanceCheck[Ctx]( - fn: ExistingInstanceCheckContext[Ctx] ⇒ (Any, Class[_]) ⇒ Boolean) extends AstSchemaResolver[Ctx] + fn: ExistingInstanceCheckContext[Ctx] => (Any, Class[_]) => Boolean) extends AstSchemaResolver[Ctx] -case class ConflictResolver[Ctx](resolve: (MatOrigin, Vector[MaterializedType]) ⇒ MaterializedType) extends AstSchemaResolver[Ctx] +case class ConflictResolver[Ctx](resolve: (MatOrigin, Vector[MaterializedType]) => MaterializedType) extends AstSchemaResolver[Ctx] sealed trait AstSchemaGenericResolver[T] { def locations: Set[DirectiveLocation.Value] @@ -159,7 +159,7 @@ sealed trait AstSchemaGenericResolver[T] { case class GenericDirectiveResolver[T]( directive: Directive, locations: Set[DirectiveLocation.Value] = Set.empty, - resolve: GenericDirectiveContext ⇒ Option[T]) extends AstSchemaGenericResolver[T] { + resolve: GenericDirectiveContext => Option[T]) extends AstSchemaGenericResolver[T] { def directiveName = directive.name } @@ -192,7 +192,7 @@ trait WithTypeLookup[Ctx] { case class GenericDynamicDirectiveResolver[T, A]( directiveName: String, locations: Set[DirectiveLocation.Value] = Set.empty, - resolve: GenericDynamicDirectiveContext[A] ⇒ Option[T])(implicit val marshaller: ResultMarshallerForType[T]) extends AstSchemaGenericResolver[T] + resolve: GenericDynamicDirectiveContext[A] => Option[T])(implicit val marshaller: ResultMarshallerForType[T]) extends AstSchemaGenericResolver[T] case class AstDirectiveInputTypeContext[Ctx]( origin: MatOrigin, @@ -309,4 +309,4 @@ case class InstanceCheckContext[Ctx]( case class ExistingInstanceCheckContext[Ctx]( origin: MatOrigin, tpe: ObjectType[Ctx, _], - extensions: 
List[ast.ObjectTypeExtensionDefinition]) \ No newline at end of file + extensions: List[ast.ObjectTypeExtensionDefinition]) diff --git a/src/main/scala/sangria/schema/Context.scala b/src/main/scala/sangria/schema/Context.scala index 21d0ea94..f3979702 100644 --- a/src/main/scala/sangria/schema/Context.scala +++ b/src/main/scala/sangria/schema/Context.scala @@ -15,13 +15,13 @@ import scala.util.{Failure, Try} import scala.util.control.NonFatal sealed trait Action[+Ctx, +Val] { - def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): Action[Ctx, NewVal] + def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): Action[Ctx, NewVal] } sealed trait LeafAction[+Ctx, +Val] extends Action[Ctx, Val] { - def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] + def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] } sealed trait ReduceAction[+Ctx, +Val] extends Action[Ctx, Val] { - def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] + def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] } object ReduceAction { @@ -58,99 +58,99 @@ object LeafAction { } case class Value[Ctx, Val](value: Val) extends LeafAction[Ctx, Val] with ReduceAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] = try Value(fn(value)) catch { - case NonFatal(e) ⇒ TryValue(Failure(e)) + case NonFatal(e) => TryValue(Failure(e)) } } case class TryValue[Ctx, Val](value: Try[Val]) extends LeafAction[Ctx, Val] with ReduceAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): TryValue[Ctx, NewVal] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): TryValue[Ctx, NewVal] = TryValue(value map fn) } case class 
PartialValue[Ctx, Val](value: Val, errors: Vector[Throwable]) extends LeafAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): LeafAction[Ctx, NewVal] = try PartialValue(fn(value), errors) catch { - case NonFatal(e) ⇒ TryValue(Failure(e)) + case NonFatal(e) => TryValue(Failure(e)) } } case class FutureValue[Ctx, Val](value: Future[Val]) extends LeafAction[Ctx, Val] with ReduceAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): FutureValue[Ctx, NewVal] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): FutureValue[Ctx, NewVal] = FutureValue(value map fn) } case class PartialFutureValue[Ctx, Val](value: Future[PartialValue[Ctx, Val]]) extends LeafAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): PartialFutureValue[Ctx, NewVal] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): PartialFutureValue[Ctx, NewVal] = PartialFutureValue(value map (_.map(fn) match { - case v: PartialValue[Ctx, NewVal] ⇒ v - case TryValue(Failure(e)) ⇒ throw e - case v ⇒ throw new IllegalStateException("Unexpected result from `PartialValue.map`: " + v) + case v: PartialValue[Ctx, NewVal] => v + case TryValue(Failure(e)) => throw e + case v => throw new IllegalStateException("Unexpected result from `PartialValue.map`: " + v) })) } case class DeferredValue[Ctx, Val](value: Deferred[Val]) extends LeafAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): DeferredValue[Ctx, NewVal] = - DeferredValue(MappingDeferred(value, (v: Val) ⇒ (fn(v), Vector.empty))) + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): DeferredValue[Ctx, NewVal] = + DeferredValue(MappingDeferred(value, (v: Val) => (fn(v), Vector.empty))) - def mapWithErrors[NewVal](fn: 
Val ⇒ (NewVal, Vector[Throwable]))(implicit ec: ExecutionContext): DeferredValue[Ctx, NewVal] = + def mapWithErrors[NewVal](fn: Val => (NewVal, Vector[Throwable]))(implicit ec: ExecutionContext): DeferredValue[Ctx, NewVal] = DeferredValue(MappingDeferred(value, fn)) } case class DeferredFutureValue[Ctx, Val](value: Future[Deferred[Val]]) extends LeafAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): DeferredFutureValue[Ctx, NewVal] = - DeferredFutureValue(value map (MappingDeferred(_, (v: Val) ⇒ (fn(v), Vector.empty)))) + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): DeferredFutureValue[Ctx, NewVal] = + DeferredFutureValue(value map (MappingDeferred(_, (v: Val) => (fn(v), Vector.empty)))) - def mapWithErrors[NewVal](fn: Val ⇒ (NewVal, Vector[Throwable]))(implicit ec: ExecutionContext): DeferredFutureValue[Ctx, NewVal] = + def mapWithErrors[NewVal](fn: Val => (NewVal, Vector[Throwable]))(implicit ec: ExecutionContext): DeferredFutureValue[Ctx, NewVal] = DeferredFutureValue(value map (MappingDeferred(_, fn))) } case class SequenceLeafAction[Ctx, Val](value: Seq[LeafAction[Ctx, Val]]) extends LeafAction[Ctx, Seq[Val]] { - override def map[NewVal](fn: Seq[Val] ⇒ NewVal)(implicit ec: ExecutionContext): MappedSequenceLeafAction[Ctx, Val, NewVal] = + override def map[NewVal](fn: Seq[Val] => NewVal)(implicit ec: ExecutionContext): MappedSequenceLeafAction[Ctx, Val, NewVal] = new MappedSequenceLeafAction[Ctx, Val, NewVal](this, fn) } -class MappedSequenceLeafAction[Ctx, Val, NewVal](val action: SequenceLeafAction[Ctx, Val], val mapFn: Seq[Val] ⇒ NewVal) extends LeafAction[Ctx, NewVal] { - override def map[NewNewVal](fn: NewVal ⇒ NewNewVal)(implicit ec: ExecutionContext): MappedSequenceLeafAction[Ctx, Val, NewNewVal] = - new MappedSequenceLeafAction[Ctx, Val, NewNewVal](action, v ⇒ fn(mapFn(v))) +class MappedSequenceLeafAction[Ctx, Val, NewVal](val action: SequenceLeafAction[Ctx, Val], val mapFn: 
Seq[Val] => NewVal) extends LeafAction[Ctx, NewVal] { + override def map[NewNewVal](fn: NewVal => NewNewVal)(implicit ec: ExecutionContext): MappedSequenceLeafAction[Ctx, Val, NewNewVal] = + new MappedSequenceLeafAction[Ctx, Val, NewNewVal](action, v => fn(mapFn(v))) } -class UpdateCtx[Ctx, Val](val action: LeafAction[Ctx, Val], val nextCtx: Val ⇒ Ctx) extends Action[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): MappedUpdateCtx[Ctx, Val, NewVal] = +class UpdateCtx[Ctx, Val](val action: LeafAction[Ctx, Val], val nextCtx: Val => Ctx) extends Action[Ctx, Val] { + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): MappedUpdateCtx[Ctx, Val, NewVal] = new MappedUpdateCtx[Ctx, Val, NewVal](action, nextCtx, fn) } -class MappedUpdateCtx[Ctx, Val, NewVal](val action: LeafAction[Ctx, Val], val nextCtx: Val ⇒ Ctx, val mapFn: Val ⇒ NewVal) extends Action[Ctx, NewVal] { - override def map[NewNewVal](fn: NewVal ⇒ NewNewVal)(implicit ec: ExecutionContext): MappedUpdateCtx[Ctx, Val, NewNewVal] = - new MappedUpdateCtx[Ctx, Val, NewNewVal](action, nextCtx, v ⇒ fn(mapFn(v))) +class MappedUpdateCtx[Ctx, Val, NewVal](val action: LeafAction[Ctx, Val], val nextCtx: Val => Ctx, val mapFn: Val => NewVal) extends Action[Ctx, NewVal] { + override def map[NewNewVal](fn: NewVal => NewNewVal)(implicit ec: ExecutionContext): MappedUpdateCtx[Ctx, Val, NewNewVal] = + new MappedUpdateCtx[Ctx, Val, NewNewVal](action, nextCtx, v => fn(mapFn(v))) } object UpdateCtx { - def apply[Ctx, Val](action: LeafAction[Ctx, Val])(newCtx: Val ⇒ Ctx): UpdateCtx[Ctx, Val] = new UpdateCtx(action, newCtx) + def apply[Ctx, Val](action: LeafAction[Ctx, Val])(newCtx: Val => Ctx): UpdateCtx[Ctx, Val] = new UpdateCtx(action, newCtx) } private[sangria] case class SubscriptionValue[Ctx, Val, S[_]](source: Val, stream: SubscriptionStream[S]) extends LeafAction[Ctx, Val] { - override def map[NewVal](fn: Val ⇒ NewVal)(implicit ec: ExecutionContext): 
SubscriptionValue[Ctx, NewVal, S] = + override def map[NewVal](fn: Val => NewVal)(implicit ec: ExecutionContext): SubscriptionValue[Ctx, NewVal, S] = throw new IllegalStateException("`map` is not supported subscription actions. Action is only intended for internal use.") } case class ProjectionName(name: String) extends FieldTag case object ProjectionExclude extends FieldTag -trait Projector[Ctx, Val, Res] extends (Context[Ctx, Val] ⇒ Action[Ctx, Res]) { +trait Projector[Ctx, Val, Res] extends (Context[Ctx, Val] => Action[Ctx, Res]) { val maxLevel: Int = Integer.MAX_VALUE def apply(ctx: Context[Ctx, Val], projected: Vector[ProjectedName]): Action[Ctx, Res] } object Projector { - def apply[Ctx, Val, Res](fn: (Context[Ctx, Val], Vector[ProjectedName]) ⇒ Action[Ctx, Res]) = + def apply[Ctx, Val, Res](fn: (Context[Ctx, Val], Vector[ProjectedName]) => Action[Ctx, Res]) = new Projector[Ctx, Val, Res] { def apply(ctx: Context[Ctx, Val], projected: Vector[ProjectedName]) = fn(ctx, projected) override def apply(ctx: Context[Ctx, Val]) = throw new IllegalStateException("Default apply should not be called on projector!") } - def apply[Ctx, Val, Res](levels: Int, fn: (Context[Ctx, Val], Vector[ProjectedName]) ⇒ Action[Ctx, Res]) = + def apply[Ctx, Val, Res](levels: Int, fn: (Context[Ctx, Val], Vector[ProjectedName]) => Action[Ctx, Res]) = new Projector[Ctx, Val, Res] { override val maxLevel = levels def apply(ctx: Context[Ctx, Val], projected: Vector[ProjectedName]) = fn(ctx, projected) @@ -167,7 +167,7 @@ case class ProjectedName(name: String, children: Vector[ProjectedName] = Vector. 
} } -case class MappingDeferred[A, +B](deferred: Deferred[A], mapFn: A ⇒ (B, Vector[Throwable])) extends Deferred[B] +case class MappingDeferred[A, +B](deferred: Deferred[A], mapFn: A => (B, Vector[Throwable])) extends Deferred[B] trait WithArguments { def args: Args @@ -181,14 +181,14 @@ trait WithArguments { def argDefinedInQuery(name: String): Boolean = args.argDefinedInQuery(name) def argDefinedInQuery(arg: Argument[_]): Boolean = args.argDefinedInQuery(arg) - def withArgs[A1, R](arg1: Argument[A1])(fn: A1 ⇒ R): R = args.withArgs(arg1)(fn) - def withArgs[A1, A2, R](arg1: Argument[A1], arg2: Argument[A2])(fn: (A1, A2) ⇒ R): R = args.withArgs(arg1, arg2)(fn) - def withArgs[A1, A2, A3, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3])(fn: (A1, A2, A3) ⇒ R): R = args.withArgs(arg1, arg2, arg3)(fn) - def withArgs[A1, A2, A3, A4, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4])(fn: (A1, A2, A3, A4) ⇒ R): R = args.withArgs(arg1, arg2, arg3, arg4)(fn) - def withArgs[A1, A2, A3, A4, A5, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5])(fn: (A1, A2, A3, A4, A5) ⇒ R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5)(fn) - def withArgs[A1, A2, A3, A4, A5, A6, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6])(fn: (A1, A2, A3, A4, A5, A6) ⇒ R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6)(fn) - def withArgs[A1, A2, A3, A4, A5, A6, A7, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7])(fn: (A1, A2, A3, A4, A5, A6, A7) ⇒ R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6, arg7)(fn) - def withArgs[A1, A2, A3, A4, A5, A6, A7, A8, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7], arg8: Argument[A8])(fn: (A1, A2, 
A3, A4, A5, A6, A7, A8) ⇒ R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8)(fn) + def withArgs[A1, R](arg1: Argument[A1])(fn: A1 => R): R = args.withArgs(arg1)(fn) + def withArgs[A1, A2, R](arg1: Argument[A1], arg2: Argument[A2])(fn: (A1, A2) => R): R = args.withArgs(arg1, arg2)(fn) + def withArgs[A1, A2, A3, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3])(fn: (A1, A2, A3) => R): R = args.withArgs(arg1, arg2, arg3)(fn) + def withArgs[A1, A2, A3, A4, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4])(fn: (A1, A2, A3, A4) => R): R = args.withArgs(arg1, arg2, arg3, arg4)(fn) + def withArgs[A1, A2, A3, A4, A5, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5])(fn: (A1, A2, A3, A4, A5) => R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5)(fn) + def withArgs[A1, A2, A3, A4, A5, A6, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6])(fn: (A1, A2, A3, A4, A5, A6) => R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6)(fn) + def withArgs[A1, A2, A3, A4, A5, A6, A7, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7])(fn: (A1, A2, A3, A4, A5, A6, A7) => R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6, arg7)(fn) + def withArgs[A1, A2, A3, A4, A5, A6, A7, A8, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7], arg8: Argument[A8])(fn: (A1, A2, A3, A4, A5, A6, A7, A8) => R): R = args.withArgs(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8)(fn) } trait WithInputTypeRendering[Ctx] { @@ -227,8 +227,8 @@ object DefaultValueRenderer { None else coercionHelper.coerceInputValue(tpe, Nil, inputValue, None, None, CoercedScalaResultMarshaller.default, CoercedScalaResultMarshaller.default, isArgument = 
false)(iu) match { - case Right(Trinary.Defined(coerced)) ⇒ Some(renderCoercedInputValue(tpe, coerced)) - case _ ⇒ None + case Right(Trinary.Defined(coerced)) => Some(renderCoercedInputValue(tpe, coerced)) + case _ => None } } @@ -239,28 +239,28 @@ object DefaultValueRenderer { marshaller.renderPretty(renderCoercedInputValue(tpe, value)) def renderCoercedInputValue(t: InputType[_], v: Any): marshaller.Node = t match { - case _ if v == null ⇒ marshaller.nullNode - case s: ScalarType[Any @unchecked] ⇒ Resolver.marshalScalarValue(s.coerceOutput(v, marshaller.capabilities), marshaller, s.name, s.scalarInfo) - case s: ScalarAlias[Any @unchecked, Any @unchecked] ⇒ renderCoercedInputValue(s.aliasFor, s.toScalar(v)) - case e: EnumType[Any @unchecked] ⇒ Resolver.marshalEnumValue(e.coerceOutput(v), marshaller, e.name) - case io: InputObjectType[_] ⇒ + case _ if v == null => marshaller.nullNode + case s: ScalarType[Any @unchecked] => Resolver.marshalScalarValue(s.coerceOutput(v, marshaller.capabilities), marshaller, s.name, s.scalarInfo) + case s: ScalarAlias[Any @unchecked, Any @unchecked] => renderCoercedInputValue(s.aliasFor, s.toScalar(v)) + case e: EnumType[Any @unchecked] => Resolver.marshalEnumValue(e.coerceOutput(v), marshaller, e.name) + case io: InputObjectType[_] => val mapValue = v.asInstanceOf[Map[String, Any]] val builder = io.fields.foldLeft(marshaller.emptyMapNode(io.fields.map(_.name))) { - case (acc, field) if mapValue contains field.name ⇒ + case (acc, field) if mapValue contains field.name => marshaller.addMapNodeElem(acc, field.name, renderCoercedInputValue(field.fieldType, mapValue(field.name)), optional = false) - case (acc, _) ⇒ acc + case (acc, _) => acc } marshaller.mapNode(builder) - case l: ListInputType[_] ⇒ + case l: ListInputType[_] => val listValue = v.asInstanceOf[Seq[Any]] marshaller.mapAndMarshal[Any](listValue, renderCoercedInputValue(l.ofType, _)) - case o: OptionInputType[_] ⇒ v match { - case Some(optVal) ⇒ 
renderCoercedInputValue(o.ofType, optVal) - case None ⇒ marshaller.nullNode - case other ⇒ renderCoercedInputValue(o.ofType, other) + case o: OptionInputType[_] => v match { + case Some(optVal) => renderCoercedInputValue(o.ofType, optVal) + case None => marshaller.nullNode + case other => renderCoercedInputValue(o.ofType, other) } } } @@ -286,13 +286,13 @@ case class Context[Ctx, Val]( def attachment[T <: MiddlewareAttachment : ClassTag]: Option[T] = { val clazz = implicitly[ClassTag[T]].runtimeClass - middlewareAttachments.collectFirst {case a if clazz isAssignableFrom a.getClass ⇒ a.asInstanceOf[T]} + middlewareAttachments.collectFirst {case a if clazz isAssignableFrom a.getClass => a.asInstanceOf[T]} } def attachments[T <: MiddlewareAttachment : ClassTag]: Vector[T] = { val clazz = implicitly[ClassTag[T]].runtimeClass - middlewareAttachments.collect {case a if clazz isAssignableFrom a.getClass ⇒ a.asInstanceOf[T]} + middlewareAttachments.collect {case a if clazz isAssignableFrom a.getClass => a.asInstanceOf[T]} } } @@ -337,14 +337,14 @@ case class Args(raw: Map[String, Any], argsWithDefault: Set[String], optionalArg def argDefinedInQuery(name: String): Boolean = !undefinedArgs.contains(name) def argDefinedInQuery(arg: Argument[_]): Boolean = argDefinedInQuery(arg.name) - def withArgs[A1, R](arg1: Argument[A1])(fn: A1 ⇒ R): R = fn(arg(arg1)) - def withArgs[A1, A2, R](arg1: Argument[A1], arg2: Argument[A2])(fn: (A1, A2) ⇒ R): R = fn(arg(arg1), arg(arg2)) - def withArgs[A1, A2, A3, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3])(fn: (A1, A2, A3) ⇒ R): R = fn(arg(arg1), arg(arg2), arg(arg3)) - def withArgs[A1, A2, A3, A4, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4])(fn: (A1, A2, A3, A4) ⇒ R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4)) - def withArgs[A1, A2, A3, A4, A5, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5])(fn: (A1, A2, A3, A4, A5) ⇒ R): R = 
fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5)) - def withArgs[A1, A2, A3, A4, A5, A6, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6])(fn: (A1, A2, A3, A4, A5, A6) ⇒ R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5), arg(arg6)) - def withArgs[A1, A2, A3, A4, A5, A6, A7, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7])(fn: (A1, A2, A3, A4, A5, A6, A7) ⇒ R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5), arg(arg6), arg(arg7)) - def withArgs[A1, A2, A3, A4, A5, A6, A7, A8, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7], arg8: Argument[A8])(fn: (A1, A2, A3, A4, A5, A6, A7, A8) ⇒ R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5), arg(arg6), arg(arg7), arg(arg8)) + def withArgs[A1, R](arg1: Argument[A1])(fn: A1 => R): R = fn(arg(arg1)) + def withArgs[A1, A2, R](arg1: Argument[A1], arg2: Argument[A2])(fn: (A1, A2) => R): R = fn(arg(arg1), arg(arg2)) + def withArgs[A1, A2, A3, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3])(fn: (A1, A2, A3) => R): R = fn(arg(arg1), arg(arg2), arg(arg3)) + def withArgs[A1, A2, A3, A4, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4])(fn: (A1, A2, A3, A4) => R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4)) + def withArgs[A1, A2, A3, A4, A5, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5])(fn: (A1, A2, A3, A4, A5) => R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5)) + def withArgs[A1, A2, A3, A4, A5, A6, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6])(fn: (A1, A2, A3, A4, A5, A6) => R): R = fn(arg(arg1), arg(arg2), arg(arg3), 
arg(arg4), arg(arg5), arg(arg6)) + def withArgs[A1, A2, A3, A4, A5, A6, A7, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7])(fn: (A1, A2, A3, A4, A5, A6, A7) => R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5), arg(arg6), arg(arg7)) + def withArgs[A1, A2, A3, A4, A5, A6, A7, A8, R](arg1: Argument[A1], arg2: Argument[A2], arg3: Argument[A3], arg4: Argument[A4], arg5: Argument[A5], arg6: Argument[A6], arg7: Argument[A7], arg8: Argument[A8])(fn: (A1, A2, A3, A4, A5, A6, A7, A8) => R): R = fn(arg(arg1), arg(arg2), arg(arg3), arg(arg4), arg(arg5), arg(arg6), arg(arg7), arg(arg8)) } object Args { @@ -365,9 +365,9 @@ object Args { throw new IllegalArgumentException("The input expected to be a map-like data structure") } else { val argsValues = - iu.getMapKeys(input).flatMap(key ⇒ definitions.find(_.name == key)).map { arg ⇒ + iu.getMapKeys(input).flatMap(key => definitions.find(_.name == key)).map { arg => val astValue = iu.getRootMapValue(input, arg.name) - .flatMap(x ⇒ this.convert[In, ast.Value](x, arg.argumentType)) + .flatMap(x => this.convert[In, ast.Value](x, arg.argumentType)) ast.Argument(name = arg.name, value = astValue getOrElse ast.NullValue()) } @@ -381,15 +381,15 @@ object Args { apply( schemaElem.arguments, - ast.ObjectValue(astElem.arguments.map(arg ⇒ ast.ObjectField(arg.name, arg.value))): ast.Value) + ast.ObjectValue(astElem.arguments.map(arg => ast.ObjectField(arg.name, arg.value))): ast.Value) } private def convert[In: InputUnmarshaller, Out: ResultMarshallerForType](value: In, tpe: InputType[_]): Option[Out] = { val rm = implicitly[ResultMarshallerForType[Out]] ValueCoercionHelper.default.coerceInputValue(tpe, List("stub"), value, None, None, rm.marshaller, rm.marshaller, isArgument = false) match { - case Right(v) ⇒ v.toOption.asInstanceOf[Option[Out]] - case Left(violations) ⇒ throw AttributeCoercionError(violations, ExceptionHandler.empty) 
+ case Right(v) => v.toOption.asInstanceOf[Option[Out]] + case Left(violations) => throw AttributeCoercionError(violations, ExceptionHandler.empty) } } } diff --git a/src/main/scala/sangria/schema/IntrospectionSchemaBuilder.scala b/src/main/scala/sangria/schema/IntrospectionSchemaBuilder.scala index a1c11127..797cb4ba 100644 --- a/src/main/scala/sangria/schema/IntrospectionSchemaBuilder.scala +++ b/src/main/scala/sangria/schema/IntrospectionSchemaBuilder.scala @@ -23,18 +23,18 @@ trait IntrospectionSchemaBuilder[Ctx] { def buildObjectType( definition: IntrospectionObjectType, - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: IntrospectionSchemaMaterializer[Ctx, _]): Option[ObjectType[Ctx, Any]] def buildInputObjectType( definition: IntrospectionInputObjectType, - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: IntrospectionSchemaMaterializer[Ctx, _]): Option[InputObjectType[InputObjectType.DefaultInput]] def buildInterfaceType( definition: IntrospectionInterfaceType, - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: IntrospectionSchemaMaterializer[Ctx, _]): Option[InterfaceType[Ctx, Any]] def buildUnionType( @@ -109,21 +109,21 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ def buildObjectType( definition: IntrospectionObjectType, - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], interfaces: List[InterfaceType[Ctx, Any]], mat: IntrospectionSchemaMaterializer[Ctx, _]) = { val objectType = objectTypeInstanceCheck(definition) match { - case Some(fn) ⇒ + case Some(fn) => ObjectType[Ctx, Any]( name = typeName(definition), description = typeDescription(definition), fieldsFn = fields, interfaces = interfaces, - instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) ⇒ fn(value, clazz), + instanceCheck = (value: Any, clazz: Class[_], _: ObjectType[Ctx, Any]) => 
fn(value, clazz), astDirectives = Vector.empty, astNodes = Vector.empty) - case None ⇒ + case None => ObjectType[Ctx, Any]( name = typeName(definition), description = typeDescription(definition), @@ -139,7 +139,7 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ def buildInputObjectType( definition: IntrospectionInputObjectType, - fields: () ⇒ List[InputField[_]], + fields: () => List[InputField[_]], mat: IntrospectionSchemaMaterializer[Ctx, _]) = Some(InputObjectType( name = typeName(definition), @@ -150,14 +150,14 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ def buildInterfaceType( definition: IntrospectionInterfaceType, - fields: () ⇒ List[Field[Ctx, Any]], + fields: () => List[Field[Ctx, Any]], mat: IntrospectionSchemaMaterializer[Ctx, _]) = Some(InterfaceType[Ctx, Any]( name = typeName(definition), description = typeDescription(definition), fieldsFn = fields, interfaces = Nil, - manualPossibleTypes = () ⇒ Nil, + manualPossibleTypes = () => Nil, astDirectives = Vector.empty, astNodes = Vector.empty)) @@ -216,7 +216,7 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ tags = fieldTags(typeDefinition, definition), deprecationReason = fieldDeprecationReason(definition), complexity = fieldComplexity(typeDefinition, definition), - manualPossibleTypes = () ⇒ Nil, + manualPossibleTypes = () => Nil, astDirectives = Vector.empty, astNodes = Vector.empty)) @@ -260,29 +260,29 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ arguments = arguments, shouldInclude = directiveShouldInclude(definition))) - def objectTypeInstanceCheck(definition: IntrospectionObjectType): Option[(Any, Class[_]) ⇒ Boolean] = + def objectTypeInstanceCheck(definition: IntrospectionObjectType): Option[(Any, Class[_]) => Boolean] = None - def directiveShouldInclude(definition: IntrospectionDirective): DirectiveContext ⇒ Boolean = + def 
directiveShouldInclude(definition: IntrospectionDirective): DirectiveContext => Boolean = Function.const(true) def argumentFromInput(fieldDefinition: Option[IntrospectionField], definition: IntrospectionInputValue) = FromInput.defaultInput[Any] - def resolveField(typeDefinition: IntrospectionType, definition: IntrospectionField): Context[Ctx, _] ⇒ Action[Ctx, _] = - (ctx) ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + def resolveField(typeDefinition: IntrospectionType, definition: IntrospectionField): Context[Ctx, _] => Action[Ctx, _] = + (ctx) => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException def fieldTags(typeDefinition: IntrospectionType, definition: IntrospectionField): List[FieldTag] = Nil - def scalarCoerceUserInput(definition: IntrospectionScalarType): Any ⇒ Either[Violation, Any] = - _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + def scalarCoerceUserInput(definition: IntrospectionScalarType): Any => Either[Violation, Any] = + _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) - def scalarCoerceInput(definition: IntrospectionScalarType): ast.Value ⇒ Either[Violation, Any] = - _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + def scalarCoerceInput(definition: IntrospectionScalarType): ast.Value => Either[Violation, Any] = + _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) - def scalarCoerceOutput(definition: IntrospectionScalarType): (Any, Set[MarshallerCapability]) ⇒ Any = - (_, _) ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + def scalarCoerceOutput(definition: IntrospectionScalarType): (Any, Set[MarshallerCapability]) => Any = + (_, _) => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException def scalarValueInfo(definition: IntrospectionScalarType): Set[ScalarValueInfo] = Set.empty @@ -290,7 +290,7 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends 
IntrospectionSchemaBuilder[ def scalarComplexity(definition: IntrospectionScalarType): Double = 0.0D - def fieldComplexity(typeDefinition: IntrospectionType, definition: IntrospectionField): Option[(Ctx, Args, Double) ⇒ Double] = + def fieldComplexity(typeDefinition: IntrospectionType, definition: IntrospectionField): Option[(Ctx, Args, Double) => Double] = None def directiveName(definition: IntrospectionDirective): String = @@ -338,7 +338,7 @@ class DefaultIntrospectionSchemaBuilder[Ctx] extends IntrospectionSchemaBuilder[ def enumValueDeprecationReason(definition: IntrospectionEnumValue): Option[String] = definition.deprecationReason orElse (if (definition.isDeprecated) Some(DefaultDeprecationReason) else None) - def defaultValueParser: Option[String ⇒ Try[(Any, InputUnmarshaller[Any])]] = + def defaultValueParser: Option[String => Try[(Any, InputUnmarshaller[Any])]] = None } @@ -349,4 +349,4 @@ object DefaultIntrospectionSchemaBuilder { case object MaterializedSchemaViolation extends Violation { val errorMessage = MaterializedSchemaErrorMessage } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/schema/IntrospectionSchemaMaterializer.scala b/src/main/scala/sangria/schema/IntrospectionSchemaMaterializer.scala index f9c04bcb..eb6c2afb 100644 --- a/src/main/scala/sangria/schema/IntrospectionSchemaMaterializer.scala +++ b/src/main/scala/sangria/schema/IntrospectionSchemaMaterializer.scala @@ -17,7 +17,7 @@ class IntrospectionSchemaMaterializer[Ctx, T : InputUnmarshaller](introspectionR val queryType = getObjectType(schemaDef.queryType) val mutationType = schemaDef.mutationType map getObjectType val subscriptionType = schemaDef.subscriptionType map getObjectType - val directives = (schemaDef.directives.toList ++ builder.additionalDirectiveDefs) filterNot (d ⇒ Schema.isBuiltInDirective(d.name)) flatMap buildDirective + val directives = (schemaDef.directives.toList ++ builder.additionalDirectiveDefs) filterNot (d => 
Schema.isBuiltInDirective(d.name)) flatMap buildDirective builder.buildSchema(schemaDef, queryType, mutationType, subscriptionType, findUnusedTypes(schemaDef.types ++ builder.additionalTypeDefs), @@ -28,15 +28,15 @@ class IntrospectionSchemaMaterializer[Ctx, T : InputUnmarshaller](introspectionR def findUnusedTypes(allTypes: Seq[IntrospectionType]): List[Type with Named] = { // first init all lazy fields. TODO: think about better solution typeDefCache.forEachValue { - case o: ObjectLikeType[_, _] ⇒ o.fields - case o: InputObjectType[_] ⇒ o.fields - case _ ⇒ // do nothing + case o: ObjectLikeType[_, _] => o.fields + case o: InputObjectType[_] => o.fields + case _ => // do nothing } val referenced = typeDefCache - val notReferenced = allTypes.filterNot(tpe ⇒ Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) + val notReferenced = allTypes.filterNot(tpe => Schema.isBuiltInType(tpe.name) || referenced.contains(tpe.name)) - notReferenced.toList map (tpe ⇒ getNamedType(tpe.name)) + notReferenced.toList map (tpe => getNamedType(tpe.name)) } def buildDirective(directive: IntrospectionDirective) = @@ -45,39 +45,39 @@ class IntrospectionSchemaMaterializer[Ctx, T : InputUnmarshaller](introspectionR def getObjectType(typeRef: IntrospectionTypeRef): ObjectType[Ctx, Any] = getOutputType(typeRef, false) match { - case obj: ObjectType[_, _] ⇒ obj.asInstanceOf[ObjectType[Ctx, Any]] - case _ ⇒ throw new SchemaMaterializationException(s"Type '${SchemaRenderer.renderTypeName(typeRef)}' is not an object type.") + case obj: ObjectType[_, _] => obj.asInstanceOf[ObjectType[Ctx, Any]] + case _ => throw new SchemaMaterializationException(s"Type '${SchemaRenderer.renderTypeName(typeRef)}' is not an object type.") } def getInterfaceType(typeRef: IntrospectionTypeRef) = getOutputType(typeRef, false) match { - case obj: InterfaceType[_, _] ⇒ obj.asInstanceOf[InterfaceType[Ctx, Any]] - case _ ⇒ throw new SchemaMaterializationException(s"Type 
'${SchemaRenderer.renderTypeName(typeRef)}' is not an interface type.") + case obj: InterfaceType[_, _] => obj.asInstanceOf[InterfaceType[Ctx, Any]] + case _ => throw new SchemaMaterializationException(s"Type '${SchemaRenderer.renderTypeName(typeRef)}' is not an interface type.") } def getInputType(typeRef: IntrospectionTypeRef, optional: Boolean = true): InputType[_] = typeRef match { - case IntrospectionListTypeRef(ofType) if optional ⇒ OptionInputType(ListInputType(getInputType(ofType, true))) - case IntrospectionListTypeRef(ofType) ⇒ ListInputType(getInputType(ofType, true)) - case IntrospectionNonNullTypeRef(ofType) ⇒ getInputType(ofType, false) - case IntrospectionNamedTypeRef(_, name) ⇒ + case IntrospectionListTypeRef(ofType) if optional => OptionInputType(ListInputType(getInputType(ofType, true))) + case IntrospectionListTypeRef(ofType) => ListInputType(getInputType(ofType, true)) + case IntrospectionNonNullTypeRef(ofType) => getInputType(ofType, false) + case IntrospectionNamedTypeRef(_, name) => getNamedType(name) match { - case input: InputType[_] if optional ⇒ OptionInputType(input) - case input: InputType[_] ⇒ input - case _ ⇒ throw new SchemaMaterializationException(s"Type '$name' is not an input type, but was used in input type position!") + case input: InputType[_] if optional => OptionInputType(input) + case input: InputType[_] => input + case _ => throw new SchemaMaterializationException(s"Type '$name' is not an input type, but was used in input type position!") } } def getOutputType(typeRef: IntrospectionTypeRef, optional: Boolean = true): OutputType[_] = typeRef match { - case IntrospectionListTypeRef(ofType) if optional ⇒ OptionType(ListType(getOutputType(ofType, true))) - case IntrospectionListTypeRef(ofType) ⇒ ListType(getOutputType(ofType, true)) - case IntrospectionNonNullTypeRef(ofType) ⇒ getOutputType(ofType, false) - case IntrospectionNamedTypeRef(_, name) ⇒ + case IntrospectionListTypeRef(ofType) if optional => 
OptionType(ListType(getOutputType(ofType, true))) + case IntrospectionListTypeRef(ofType) => ListType(getOutputType(ofType, true)) + case IntrospectionNonNullTypeRef(ofType) => getOutputType(ofType, false) + case IntrospectionNamedTypeRef(_, name) => getNamedType(name) match { - case input: OutputType[_] if optional ⇒ OptionType(input) - case input: OutputType[_] ⇒ input - case _ ⇒ throw new SchemaMaterializationException(s"Type '$name' is not an output type, but was used in output type position!") + case input: OutputType[_] if optional => OptionType(input) + case input: OutputType[_] => input + case _ => throw new SchemaMaterializationException(s"Type '$name' is not an output type, but was used in output type position!") } } @@ -88,28 +88,28 @@ class IntrospectionSchemaMaterializer[Ctx, T : InputUnmarshaller](introspectionR s"Invalid or incomplete schema, unknown type: $typeName. Ensure that a full introspection query is used in order to build a client schema.")))) def buildType(tpe: IntrospectionType): Option[Type with Named] = tpe match { - case o: IntrospectionObjectType ⇒ buildObjectDef(o) - case i: IntrospectionInterfaceType ⇒ buildInterfaceDef(i) - case u: IntrospectionUnionType ⇒ buildUnionDef(u) - case io: IntrospectionInputObjectType ⇒ buildInputObjectDef(io) - case s: IntrospectionScalarType ⇒ buildScalarDef(s) - case e: IntrospectionEnumType ⇒ buildEnumDef(e) + case o: IntrospectionObjectType => buildObjectDef(o) + case i: IntrospectionInterfaceType => buildInterfaceDef(i) + case u: IntrospectionUnionType => buildUnionDef(u) + case io: IntrospectionInputObjectType => buildInputObjectDef(io) + case s: IntrospectionScalarType => buildScalarDef(s) + case e: IntrospectionEnumType => buildEnumDef(e) } def buildField(typeDef: IntrospectionType, field: IntrospectionField) = builder.buildField(typeDef, field, getOutputType(field.tpe), field.args.toList flatMap (buildArgument(Some(field), _)), this) def buildObjectDef(tpe: IntrospectionObjectType) = - 
builder.buildObjectType(tpe, () ⇒ tpe.fields.toList flatMap (buildField(tpe, _)), tpe.interfaces.toList map getInterfaceType, this) + builder.buildObjectType(tpe, () => tpe.fields.toList flatMap (buildField(tpe, _)), tpe.interfaces.toList map getInterfaceType, this) def buildInterfaceDef(tpe: IntrospectionInterfaceType) = - builder.buildInterfaceType(tpe, () ⇒ tpe.fields.toList flatMap (buildField(tpe, _)), this) + builder.buildInterfaceType(tpe, () => tpe.fields.toList flatMap (buildField(tpe, _)), this) def buildUnionDef(tpe: IntrospectionUnionType) = builder.buildUnionType(tpe, tpe.possibleTypes.toList map getObjectType, this) def buildInputObjectDef(tpe: IntrospectionInputObjectType) = - builder.buildInputObjectType(tpe, () ⇒ tpe.inputFields.toList flatMap (buildInputField(tpe, _)), this) + builder.buildInputObjectType(tpe, () => tpe.inputFields.toList flatMap (buildInputField(tpe, _)), this) def buildScalarDef(tpe: IntrospectionScalarType) = builder.buildScalarType(tpe, this) @@ -121,9 +121,9 @@ class IntrospectionSchemaMaterializer[Ctx, T : InputUnmarshaller](introspectionR builder.buildEnumValue(tpe, value, this) def buildDefault(defaultValue: Option[String]) = - defaultValue map (dv ⇒ sangria.marshalling.queryAst.QueryAstInputParser.parse(dv) match { - case Success(parsed) ⇒ parsed → sangria.marshalling.queryAst.queryAstToInput - case Failure(error) ⇒ throw new SchemaMaterializationException(s"Unable to parse default value '$dv'.", error) + defaultValue map (dv => sangria.marshalling.queryAst.QueryAstInputParser.parse(dv) match { + case Success(parsed) => parsed -> sangria.marshalling.queryAst.queryAstToInput + case Failure(error) => throw new SchemaMaterializationException(s"Unable to parse default value '$dv'.", error) }) def buildArgument(fieldDef: Option[IntrospectionField], value: IntrospectionInputValue) = diff --git a/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala b/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala 
index 51e9642c..20357e1d 100644 --- a/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala +++ b/src/main/scala/sangria/schema/ResolverBasedAstSchemaBuilder.scala @@ -13,19 +13,19 @@ import scala.collection.immutable.VectorBuilder import scala.util.control.NonFatal class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ctx]]) extends DefaultAstSchemaBuilder[Ctx] { - protected lazy val directiveResolvers = resolvers collect {case dr: DirectiveResolver[Ctx] ⇒ dr} - protected lazy val directiveScalarResolvers = resolvers collect {case dr: DirectiveScalarResolver[Ctx] ⇒ dr} - protected lazy val directiveInpResolvers = resolvers collect {case dr: DirectiveInputTypeResolver[Ctx] ⇒ dr} - protected lazy val directiveOutResolvers = resolvers collect {case dr: DirectiveOutputTypeResolver[Ctx] ⇒ dr} - protected lazy val directiveProviderDirs = resolvers collect {case dr: DirectiveFieldProvider[Ctx] ⇒ dr.directive} - protected lazy val directiveDynProviderDirNames = resolvers collect {case dr: DynamicDirectiveFieldProvider[Ctx, _] ⇒ dr.directiveName} + protected lazy val directiveResolvers = resolvers collect {case dr: DirectiveResolver[Ctx] => dr} + protected lazy val directiveScalarResolvers = resolvers collect {case dr: DirectiveScalarResolver[Ctx] => dr} + protected lazy val directiveInpResolvers = resolvers collect {case dr: DirectiveInputTypeResolver[Ctx] => dr} + protected lazy val directiveOutResolvers = resolvers collect {case dr: DirectiveOutputTypeResolver[Ctx] => dr} + protected lazy val directiveProviderDirs = resolvers collect {case dr: DirectiveFieldProvider[Ctx] => dr.directive} + protected lazy val directiveDynProviderDirNames = resolvers collect {case dr: DynamicDirectiveFieldProvider[Ctx, _] => dr.directiveName} protected lazy val additionalDirectives = resolvers flatMap { - case AdditionalDirectives(ad) ⇒ ad - case _ ⇒ Nil + case AdditionalDirectives(ad) => ad + case _ => Nil } protected lazy val dynamicDirectiveNames = - 
resolvers.collect{case dr: DynamicDirectiveResolver[Ctx, _] ⇒ dr.directiveName}.toSet ++ + resolvers.collect{case dr: DynamicDirectiveResolver[Ctx, _] => dr.directiveName}.toSet ++ directiveDynProviderDirNames protected lazy val directives = @@ -36,15 +36,15 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct additionalDirectives ++ directiveProviderDirs - protected lazy val stubQueryType = ObjectType("Query", fields[Unit, Unit](Field("stub", StringType, resolve = _ ⇒ "stub"))) + protected lazy val stubQueryType = ObjectType("Query", fields[Unit, Unit](Field("stub", StringType, resolve = _ => "stub"))) protected lazy val validationSchema = Schema(stubQueryType, directives = directives.toList ++ BuiltinDirectives) override def useLegacyCommentDescriptions: Boolean = resolvers.exists(_.isInstanceOf[LegacyCommentDescriptionsResolver[Ctx]]) override lazy val additionalTypes: List[MaterializedType] = resolvers.flatMap { - case AdditionalTypes(at) ⇒ at - case _ ⇒ Nil + case AdditionalTypes(at) => at + case _ => Nil }.toList def validateSchema(schema: ast.Document, validator: QueryValidator = ResolverBasedAstSchemaBuilder.validator): Vector[Violation] = @@ -59,33 +59,33 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct protected def allowKnownDynamicDirectives(violations: Vector[Violation]) = violations.filterNot { - case UnknownDirectiveViolation(name, _, _) if dynamicDirectiveNames.contains(name) ⇒ true - case _ ⇒ false + case UnknownDirectiveViolation(name, _, _) if dynamicDirectiveNames.contains(name) => true + case _ => false } protected def findResolver(directive: ast.Directive): Option[(ast.Directive, AstSchemaResolver[Ctx])] = resolvers.collectFirst { - case r @ DirectiveResolver(d, _, _) if d.name == directive.name ⇒ directive → r - case r @ DynamicDirectiveResolver(directive.name, _, _) ⇒ directive → r + case r @ DirectiveResolver(d, _, _) if d.name == directive.name => directive -> r + case r @ 
DynamicDirectiveResolver(directive.name, _, _) => directive -> r } protected def findComplexityResolver(directive: ast.Directive): Option[(ast.Directive, AstSchemaResolver[Ctx])] = resolvers.collectFirst { - case r @ DirectiveResolver(d, _, _) if d.name == directive.name && r.complexity.isDefined ⇒ directive → r - case r @ DynamicDirectiveResolver(directive.name, _, _) if r.complexity.isDefined ⇒ directive → r + case r @ DirectiveResolver(d, _, _) if d.name == directive.name && r.complexity.isDefined => directive -> r + case r @ DynamicDirectiveResolver(directive.name, _, _) if r.complexity.isDefined => directive -> r } protected def findResolver(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition): Option[FieldResolver[Ctx]] = { - val arg = typeDefinition → definition + val arg = typeDefinition -> definition resolvers.collectFirst { - case r @ FieldResolver(fn, _) if fn.isDefinedAt(arg) ⇒ r + case r @ FieldResolver(fn, _) if fn.isDefinedAt(arg) => r } } protected def findComplexityResolver(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition): Option[FieldResolver[Ctx]] = { - val arg = typeDefinition → definition + val arg = typeDefinition -> definition resolvers.collectFirst { - case r @ FieldResolver(fn, _) if fn.isDefinedAt(arg) && r.complexity.isDefinedAt(arg) ⇒ r + case r @ FieldResolver(fn, _) if fn.isDefinedAt(arg) && r.complexity.isDefinedAt(arg) => r } } @@ -93,13 +93,13 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val arg = (origin, typeDefinition, field) resolvers.collectFirst { - case r @ ExistingFieldResolver(fn) if fn.isDefinedAt(arg) ⇒ r + case r @ ExistingFieldResolver(fn) if fn.isDefinedAt(arg) => r } } protected def findAnyResolver(origin: MatOrigin): Option[AnyFieldResolver[Ctx]] = resolvers.collectFirst { - case r @ AnyFieldResolver(fn) if fn.isDefinedAt(origin) ⇒ r + case r @ AnyFieldResolver(fn) if 
fn.isDefinedAt(origin) => r } override def resolveField( @@ -108,41 +108,41 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], definition: ast.FieldDefinition, mat: AstSchemaMaterializer[Ctx] - ): Context[Ctx, _] ⇒ Action[Ctx, _] = { + ): Context[Ctx, _] => Action[Ctx, _] = { val dResolvers = definition.directives flatMap (findResolver(_)) if (dResolvers.nonEmpty) - c ⇒ { + c => { val resultAction = dResolvers.foldLeft(None: Option[Action[Ctx, Any]]) { - case (acc, (d, DirectiveResolver(sd, fn, _))) ⇒ + case (acc, (d, DirectiveResolver(sd, fn, _))) => Some(fn(AstDirectiveContext[Ctx](d, typeDefinition, definition, extensions, c, acc, Args(sd, d)))) - case (acc, (d, ddc @ DynamicDirectiveResolver(_, fn, _))) ⇒ + case (acc, (d, ddc @ DynamicDirectiveResolver(_, fn, _))) => implicit val marshaller = ddc.marshaller Some(fn(DynamicDirectiveContext[Ctx, Any](d, typeDefinition, definition, extensions, c, acc, ResolverBasedAstSchemaBuilder.createDynamicArgs(d)))) - case (acc, _) ⇒ + case (acc, _) => acc } val typeDefinitionName = typeDefinition match { - case Left(t) ⇒ t.name - case Right(t) ⇒ t.name + case Left(t) => t.name + case Right(t) => t.name } resultAction getOrElse (throw SchemaMaterializationException(s"Resolver for '$typeDefinitionName.${definition.name}' haven't returned any action!")) } else findResolver(typeDefinition, definition) match { - case Some(fResolver) ⇒ - fResolver.resolve(typeDefinition → definition) - case None ⇒ + case Some(fResolver) => + fResolver.resolve(typeDefinition -> definition) + case None => findAnyResolver(origin) match { - case Some(fResolver) ⇒ + case Some(fResolver) => fResolver.resolve(origin) - case None ⇒ + case None => super.resolveField(origin, typeDefinition, extensions, definition, mat) } } @@ -150,13 +150,13 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct override def extendFieldResolver(origin: 
MatOrigin, typeDefinition: Option[ObjectLikeType[Ctx, _]], existing: Field[Ctx, Any], fieldType: OutputType[_], mat: AstSchemaMaterializer[Ctx]) = findExistingResolver(origin, typeDefinition, existing) match { - case Some(fResolver) ⇒ + case Some(fResolver) => fResolver.resolve((origin, typeDefinition, existing)) - case None ⇒ + case None => findAnyResolver(origin) match { - case Some(fResolver) ⇒ + case Some(fResolver) => fResolver.resolve(origin) - case None ⇒ + case None => super.extendFieldResolver(origin, typeDefinition, existing, fieldType, mat) } } @@ -173,12 +173,12 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val tpe = resolvers.collectFirst { - case DirectiveOutputTypeResolver(d, fn) if definition.directives.exists(_.name == d.name) ⇒ + case DirectiveOutputTypeResolver(d, fn) if definition.directives.exists(_.name == d.name) => val astDirective = definition.directives.find(_.name == d.name).get fn(AstDirectiveOutputTypeContext(origin, astDirective, typeDefinition, definition, extensions, mat, Args(d, astDirective))) - case OutputTypeResolver(fn) if fn isDefinedAt ctx ⇒ fn(ctx) + case OutputTypeResolver(fn) if fn isDefinedAt ctx => fn(ctx) } tpe getOrElse super.buildFieldType(origin, typeDefinition, extensions, definition, arguments, mat) @@ -194,12 +194,12 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val ctx = AstInputTypeContext(origin, schemaDefinition, astDefinition, astField, definition, mat) resolvers.collectFirst { - case DirectiveInputTypeResolver(d, fn) if definition.directives.exists(_.name == d.name) ⇒ + case DirectiveInputTypeResolver(d, fn) if definition.directives.exists(_.name == d.name) => val astDirective = definition.directives.find(_.name == d.name).get fn(AstDirectiveInputTypeContext(origin, astDirective, schemaDefinition, astDefinition, astField, definition, mat, Args(d, astDirective))) - case InputTypeResolver(fn) if fn isDefinedAt ctx ⇒ fn(ctx) + case 
InputTypeResolver(fn) if fn isDefinedAt ctx => fn(ctx) } } @@ -231,43 +231,43 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct ) = { val scalar = resolvers.collectFirst { - case DirectiveScalarResolver(d, fn) if definition.directives.exists(_.name == d.name) ⇒ + case DirectiveScalarResolver(d, fn) if definition.directives.exists(_.name == d.name) => val astDirective = definition.directives.find(_.name == d.name).get fn(AstDirectiveScalarContext(astDirective, definition, Args(d, astDirective))) - case ScalarResolver(fn) if fn.isDefinedAt(definition) ⇒ + case ScalarResolver(fn) if fn.isDefinedAt(definition) => fn(definition) } scalar match { - case Some(s) ⇒ Some(s.asInstanceOf[ScalarType[Any]]) - case _ ⇒ super.buildScalarType(origin, extensions, definition, mat) + case Some(s) => Some(s.asInstanceOf[ScalarType[Any]]) + case _ => super.buildScalarType(origin, extensions, definition, mat) } } override def resolveNameConflict(fromOrigin: MatOrigin, types: Vector[MaterializedType]) = - resolvers.collectFirst {case r: ConflictResolver[Ctx] ⇒ r.resolve(fromOrigin, types)} getOrElse + resolvers.collectFirst {case r: ConflictResolver[Ctx] => r.resolve(fromOrigin, types)} getOrElse super.resolveNameConflict(fromOrigin, types) override def fieldComplexity(typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Ctx, _]], definition: ast.FieldDefinition) = { val dResolvers = definition.directives flatMap (findComplexityResolver(_)) val fromDirectives = - dResolvers.foldLeft(None: Option[(Ctx, Args, Double) ⇒ Double]) { - case (None, (d, DirectiveResolver(sd, _, Some(complexity)))) ⇒ + dResolvers.foldLeft(None: Option[(Ctx, Args, Double) => Double]) { + case (None, (d, DirectiveResolver(sd, _, Some(complexity)))) => Some(complexity(ComplexityDirectiveContext[Ctx](d, typeDefinition, definition, Args(sd, d)))) - case (None, (d, ddc @ DynamicDirectiveResolver(_, _, Some(complexity)))) ⇒ + case (None, (d, ddc @ DynamicDirectiveResolver(_, _, 
Some(complexity)))) => implicit val marshaller = ddc.marshaller Some(complexity(ComplexityDynamicDirectiveContext[Ctx, Any](d, typeDefinition, definition, ResolverBasedAstSchemaBuilder.createDynamicArgs(d)))) - case (acc, _) ⇒ + case (acc, _) => acc } fromDirectives orElse - findComplexityResolver(typeDefinition, definition).map(_.complexity(typeDefinition → definition)) orElse + findComplexityResolver(typeDefinition, definition).map(_.complexity(typeDefinition -> definition)) orElse super.fieldComplexity(typeDefinition, definition) } @@ -280,12 +280,12 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val allAstDirectives = typeDefinition.directives ++ extensions.flatMap(_.directives) val materializedFields = - allAstDirectives.flatMap { astDir ⇒ + allAstDirectives.flatMap { astDir => resolvers.collect { - case DirectiveFieldProvider(directive, resolve) if directive.name == astDir.name ⇒ + case DirectiveFieldProvider(directive, resolve) if directive.name == astDir.name => resolve(DirectiveFieldProviderContext[Ctx](origin, astDir, typeDefinition, extensions, mat, Args(directive, astDir))) - case ddfp @ DynamicDirectiveFieldProvider(astDir.name, resolve) ⇒ + case ddfp @ DynamicDirectiveFieldProvider(astDir.name, resolve) => implicit val marshaller = ddfp.marshaller resolve(DynamicDirectiveFieldProviderContext[Ctx, Any](origin, astDir, typeDefinition, extensions, mat, ResolverBasedAstSchemaBuilder.createDynamicArgs(astDir))) @@ -305,7 +305,7 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val ctx = ExistingScalarContext(origin, extensions, existing.asInstanceOf[ScalarType[Any]], mat) val resolved = resolvers.collectFirst { - case ExistingScalarResolver(resolve) if resolve.isDefinedAt(ctx) ⇒ resolve(ctx).asInstanceOf[ScalarType[T]] + case ExistingScalarResolver(resolve) if resolve.isDefinedAt(ctx) => resolve(ctx).asInstanceOf[ScalarType[T]] } resolved getOrElse super.transformScalarType(origin, 
extensions, existing, mat) @@ -320,33 +320,33 @@ class ResolverBasedAstSchemaBuilder[Ctx](val resolvers: Seq[AstSchemaResolver[Ct val ctx = ExistingEnumContext[Ctx](origin, extensions, existing.asInstanceOf[EnumType[Any]], mat) val resolved = resolvers.collectFirst { - case ExistingEnumResolver(resolve) if resolve.isDefinedAt(ctx) ⇒ resolve(ctx).asInstanceOf[EnumType[T]] + case ExistingEnumResolver(resolve) if resolve.isDefinedAt(ctx) => resolve(ctx).asInstanceOf[EnumType[T]] } resolved getOrElse super.transformEnumType(origin, extensions, existing, mat) } - override def objectTypeInstanceCheck(origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) ⇒ Boolean] = { + override def objectTypeInstanceCheck(origin: MatOrigin, definition: ast.ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) => Boolean] = { val ctx = InstanceCheckContext[Ctx](origin, definition, extensions) resolvers.collectFirst { - case InstanceCheck(fn) ⇒ fn(ctx) + case InstanceCheck(fn) => fn(ctx) } } - override def extendedObjectTypeInstanceCheck(origin: MatOrigin, tpe: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) ⇒ Boolean] = { + override def extendedObjectTypeInstanceCheck(origin: MatOrigin, tpe: ObjectType[Ctx, _], extensions: List[ast.ObjectTypeExtensionDefinition]): Option[(Any, Class[_]) => Boolean] = { val ctx = ExistingInstanceCheckContext[Ctx](origin, tpe, extensions) resolvers.collectFirst { - case ExistingInstanceCheck(fn) ⇒ fn(ctx) + case ExistingInstanceCheck(fn) => fn(ctx) } } override def enumValue(typeDefinition: Either[ast.EnumTypeDefinition, EnumType[_]], definition: ast.EnumValueDefinition) = { - val ctx = typeDefinition → definition + val ctx = typeDefinition -> definition val resolved = resolvers.collectFirst { - case SimpleEnumValueResolver(fn) if fn.isDefinedAt(ctx) ⇒ fn(ctx) + case 
SimpleEnumValueResolver(fn) if fn.isDefinedAt(ctx) => fn(ctx) } resolved getOrElse super.enumValue(typeDefinition, definition) @@ -361,82 +361,82 @@ object ResolverBasedAstSchemaBuilder { private def invalidType[In](expected: String, got: In)(implicit iu: InputUnmarshaller[In]) = throw InputMaterializationException(s"Expected $expected value, but got: " + iu.render(got)) - private def safe[T, In](op: ⇒ T, expected: String, got: In)(implicit iu: InputUnmarshaller[In]) = + private def safe[T, In](op: => T, expected: String, got: In)(implicit iu: InputUnmarshaller[In]) = try op catch { - case NonFatal(_) ⇒ invalidType(expected, got) + case NonFatal(_) => invalidType(expected, got) } private def extractScalar[In](t: ScalarType[_], value: In)(implicit iu: InputUnmarshaller[In]) = { val coerced = iu.getScalarValue(value) t match { - case BooleanType ⇒ + case BooleanType => coerced match { - case v: Boolean ⇒ v - case v: String ⇒ safe(v.toBoolean, "Boolean", value) - case _ ⇒ invalidType("Boolean", value) + case v: Boolean => v + case v: String => safe(v.toBoolean, "Boolean", value) + case _ => invalidType("Boolean", value) } - case StringType ⇒ + case StringType => coerced.toString - case IDType ⇒ + case IDType => coerced match { - case s: String ⇒ s - case _ ⇒ invalidType("ID", value) + case s: String => s + case _ => invalidType("ID", value) } - case IntType ⇒ + case IntType => coerced match { - case v: Int ⇒ v - case i: Long if i.isValidInt ⇒ i.toInt - case v: BigInt if v.isValidInt ⇒ v.intValue - case d: Double if d.isValidInt ⇒ d.intValue - case d: BigDecimal if d.isValidInt ⇒ d.intValue - case v: String ⇒ safe(v.toInt, "Int", value) - case _ ⇒ invalidType("Int", value) + case v: Int => v + case i: Long if i.isValidInt => i.toInt + case v: BigInt if v.isValidInt => v.intValue + case d: Double if d.isValidInt => d.intValue + case d: BigDecimal if d.isValidInt => d.intValue + case v: String => safe(v.toInt, "Int", value) + case _ => invalidType("Int", value) } - case 
LongType ⇒ + case LongType => coerced match { - case i: Int ⇒ i: Long - case i: Long ⇒ i - case i: BigInt if !i.isValidLong ⇒ invalidType("Long", value) - case i: BigInt ⇒ i.longValue - case d: Double if d.isWhole ⇒ d.toLong - case d: BigDecimal if d.isValidLong ⇒ d.longValue - case v: String ⇒ safe(v.toLong, "Long", value) - case _ ⇒ invalidType("Long", value) + case i: Int => i: Long + case i: Long => i + case i: BigInt if !i.isValidLong => invalidType("Long", value) + case i: BigInt => i.longValue + case d: Double if d.isWhole => d.toLong + case d: BigDecimal if d.isValidLong => d.longValue + case v: String => safe(v.toLong, "Long", value) + case _ => invalidType("Long", value) } - case BigIntType ⇒ + case BigIntType => coerced match { - case i: Int ⇒ BigInt(i) - case i: Long ⇒ BigInt(i) - case i: BigInt ⇒ i - case d: Double if d.isWhole ⇒ BigInt(d.toLong) - case d: BigDecimal if d.isWhole ⇒ d.toBigInt - case v: String ⇒ safe(BigInt(v), "BigInt", value) - case _ ⇒ invalidType("BigInt", value) + case i: Int => BigInt(i) + case i: Long => BigInt(i) + case i: BigInt => i + case d: Double if d.isWhole => BigInt(d.toLong) + case d: BigDecimal if d.isWhole => d.toBigInt + case v: String => safe(BigInt(v), "BigInt", value) + case _ => invalidType("BigInt", value) } - case BigDecimalType ⇒ + case BigDecimalType => coerced match { - case i: Int ⇒ BigDecimal(i) - case i: Long ⇒ BigDecimal(i) - case i: BigInt ⇒ BigDecimal(i) - case d: Double ⇒ BigDecimal(d) - case d: BigDecimal ⇒ d - case v: String ⇒ safe(BigDecimal(v), "BigDecimal", value) - case _ ⇒ invalidType("BigDecimal", value) + case i: Int => BigDecimal(i) + case i: Long => BigDecimal(i) + case i: BigInt => BigDecimal(i) + case d: Double => BigDecimal(d) + case d: BigDecimal => d + case v: String => safe(BigDecimal(v), "BigDecimal", value) + case _ => invalidType("BigDecimal", value) } - case FloatType ⇒ + case FloatType => coerced match { - case i: Int ⇒ i.toDouble - case i: Long ⇒ i.toDouble - case i: BigInt if 
!i.isValidDouble ⇒ invalidType("Float", value) - case i: BigInt ⇒ i.doubleValue - case d: Double ⇒ d - case d: BigDecimal if !d.isDecimalDouble ⇒ invalidType("Float", value) - case d: BigDecimal ⇒ d.doubleValue - case v: String ⇒ safe(v.toDouble, "Float", value) - case _ ⇒ invalidType("Float", value) + case i: Int => i.toDouble + case i: Long => i.toDouble + case i: BigInt if !i.isValidDouble => invalidType("Float", value) + case i: BigInt => i.doubleValue + case d: Double => d + case d: BigDecimal if !d.isDecimalDouble => invalidType("Float", value) + case d: BigDecimal => d.doubleValue + case v: String => safe(v.toDouble, "Float", value) + case _ => invalidType("Float", value) } - case _ ⇒ coerced + case _ => coerced } } @@ -444,18 +444,18 @@ object ResolverBasedAstSchemaBuilder { iu.getScalarValue(value).toString def extractValue[In](tpe: OutputType[_], value: Option[In])(implicit iu: InputUnmarshaller[In]): Any = tpe match { - case OptionType(ofType) ⇒ Option(extractValue(ofType, value)) - case _ if value.isEmpty || !iu.isDefined(value.get) ⇒ null - case ListType(ofType) ⇒ iu.getListValue(value.get) map (v ⇒ extractValue(ofType, Some(v))) - case t: ScalarAlias[_, _] ⇒ extractValue(t.aliasFor, value) - case t: ScalarType[_] ⇒ extractScalar(t, value.get) - case t: EnumType[_] ⇒ extractEnum(t, value.get) - case _: CompositeType[_] ⇒ + case OptionType(ofType) => Option(extractValue(ofType, value)) + case _ if value.isEmpty || !iu.isDefined(value.get) => null + case ListType(ofType) => iu.getListValue(value.get) map (v => extractValue(ofType, Some(v))) + case t: ScalarAlias[_, _] => extractValue(t.aliasFor, value) + case t: ScalarType[_] => extractScalar(t, value.get) + case t: EnumType[_] => extractEnum(t, value.get) + case _: CompositeType[_] => val objValue = value.get if (iu.isMapNode(objValue)) objValue else invalidType("Object", objValue) - case t ⇒ throw SchemaMaterializationException(s"Extractor for a type '${SchemaRenderer.renderTypeName(t)}' is not 
supported yet.") + case t => throw SchemaMaterializationException(s"Extractor for a type '${SchemaRenderer.renderTypeName(t)}' is not supported yet.") } def extractFieldValue[Ctx, In](context: Context[Ctx, _])(implicit iu: InputUnmarshaller[In]): Any = @@ -468,27 +468,27 @@ object ResolverBasedAstSchemaBuilder { else extractValue(field.fieldType, iu.getMapValue(value, field.name)) catch { - case e: SchemaMaterializationException ⇒ + case e: SchemaMaterializationException => throw e - case NonFatal(e) ⇒ + case NonFatal(e) => throw SchemaMaterializationException(s"Can't extract value for a field '${parentType.name}.${field.name}'.", e) } } def defaultInputResolver[Ctx, In : InputUnmarshaller] = FieldResolver[Ctx] { - case (_, _) ⇒ extractFieldValue[Ctx, In] + case (_, _) => extractFieldValue[Ctx, In] } def defaultExistingInputResolver[Ctx, In : InputUnmarshaller] = ExistingFieldResolver[Ctx] { - case (_, _, _) ⇒ extractFieldValue[Ctx, In] + case (_, _, _) => extractFieldValue[Ctx, In] } def defaultAnyInputResolver[Ctx, In : InputUnmarshaller] = AnyFieldResolver[Ctx] { - case origin if !origin.isInstanceOf[ExistingSchemaOrigin[_, _]] ⇒ extractFieldValue[Ctx, In] + case origin if !origin.isInstanceOf[ExistingSchemaOrigin[_, _]] => extractFieldValue[Ctx, In] } def resolveDirectives[T](schema: ast.Document, resolvers: AstSchemaGenericResolver[T]*): Vector[T] = { @@ -498,16 +498,16 @@ object ResolverBasedAstSchemaBuilder { ast.AstVisitor.visit(schema, ast.AstVisitor( onEnter = { - case node: ast.WithDirectives ⇒ + case node: ast.WithDirectives => stack.push(node) result ++= - node.directives.flatMap { astDir ⇒ + node.directives.flatMap { astDir => findByLocation(stack, node, resolversByName.getOrElse(astDir.name, Nil)) .flatMap { - case GenericDirectiveResolver(directive, _, resolve) ⇒ + case GenericDirectiveResolver(directive, _, resolve) => resolve(GenericDirectiveContext(astDir, node, Args(directive, astDir))) - case gd @ GenericDynamicDirectiveResolver(_, _, resolve) 
⇒ + case gd @ GenericDynamicDirectiveResolver(_, _, resolve) => implicit val marshaller = gd.marshaller resolve(GenericDynamicDirectiveContext(astDir, node, createDynamicArgs(astDir))) @@ -516,12 +516,12 @@ object ResolverBasedAstSchemaBuilder { VisitorCommand.Continue - case node ⇒ + case node => stack.push(node) VisitorCommand.Continue }, onLeave = { - case _ ⇒ + case _ => stack.pop() VisitorCommand.Continue } @@ -534,11 +534,11 @@ object ResolverBasedAstSchemaBuilder { import sangria.marshalling.queryAst._ import sangria.marshalling.MarshallingUtil._ - val value: ast.Value = ast.ObjectValue(astDirective.arguments.map(arg ⇒ ast.ObjectField(arg.name, arg.value))) + val value: ast.Value = ast.ObjectValue(astDirective.arguments.map(arg => ast.ObjectField(arg.name, arg.value))) value.convertMarshaled[T] } private def findByLocation[T](visitorStack: ValidatorStack[ast.AstNode], node: ast.AstNode, directives: Seq[AstSchemaGenericResolver[T]]) = - directives.filter(d ⇒ d.locations.isEmpty || KnownDirectives.getLocation(node, visitorStack.head(1)).fold(false)(l ⇒ d.locations contains l._1)) + directives.filter(d => d.locations.isEmpty || KnownDirectives.getLocation(node, visitorStack.head(1)).fold(false)(l => d.locations contains l._1)) } diff --git a/src/main/scala/sangria/schema/Schema.scala b/src/main/scala/sangria/schema/Schema.scala index 806b28b8..521c0bf2 100644 --- a/src/main/scala/sangria/schema/Schema.scala +++ b/src/main/scala/sangria/schema/Schema.scala @@ -21,12 +21,12 @@ sealed trait Type { def namedType: Type with Named = { def getNamedType(tpe: Type): Type with Named = tpe match { - case OptionInputType(ofType) ⇒ getNamedType(ofType) - case OptionType(ofType) ⇒ getNamedType(ofType) - case ListInputType(ofType) ⇒ getNamedType(ofType) - case ListType(ofType) ⇒ getNamedType(ofType) - case n: Named ⇒ n - case t ⇒ throw new IllegalStateException("Expected named type, but got: " + t) + case OptionInputType(ofType) => getNamedType(ofType) + case 
OptionType(ofType) => getNamedType(ofType) + case ListInputType(ofType) => getNamedType(ofType) + case ListType(ofType) => getNamedType(ofType) + case n: Named => n + case t => throw new IllegalStateException("Expected named type, but got: " + t) } getNamedType(this) @@ -35,20 +35,20 @@ sealed trait Type { sealed trait InputType[+T] extends Type { lazy val isOptional = this match { - case _: OptionInputType[_] ⇒ true - case _ ⇒ false + case _: OptionInputType[_] => true + case _ => false } lazy val isList = this match { - case _: ListInputType[_] ⇒ true - case _ ⇒ false + case _: ListInputType[_] => true + case _ => false } lazy val isNamed = !(isOptional && isList) lazy val nonOptionalType = this match { - case tpe: OptionInputType[_] ⇒ tpe.ofType - case tpe ⇒ tpe + case tpe: OptionInputType[_] => tpe.ofType + case tpe => tpe } def namedInputType: InputType[_] = namedType.asInstanceOf[InputType[_]] @@ -119,9 +119,9 @@ object Named { case class ScalarType[T]( name: String, description: Option[String] = None, - coerceUserInput: Any ⇒ Either[Violation, T], - coerceOutput: (T, Set[MarshallerCapability]) ⇒ Any, - coerceInput: ast.Value ⇒ Either[Violation, T], + coerceUserInput: Any => Either[Violation, T], + coerceOutput: (T, Set[MarshallerCapability]) => Any, + coerceInput: ast.Value => Either[Violation, T], complexity: Double = 0.0D, scalarInfo: Set[ScalarValueInfo] = Set.empty, astDirectives: Vector[ast.Directive] = Vector.empty, @@ -133,8 +133,8 @@ case class ScalarType[T]( case class ScalarAlias[T, ST]( aliasFor: ScalarType[ST], - toScalar: T ⇒ ST, - fromScalar: ST ⇒ Either[Violation, T] + toScalar: T => ST, + fromScalar: ST => Either[Violation, T] ) extends InputType[T @@ CoercedScalaResult] with OutputType[T] with LeafType with NullableType with UnmodifiedType with Named { def name = aliasFor.name def description = aliasFor.description @@ -147,22 +147,22 @@ case class ScalarAlias[T, ST]( sealed trait ObjectLikeType[Ctx, Val] extends OutputType[Val] with 
CompositeType[Val] with NullableType with UnmodifiedType with Named with HasAstInfo { def interfaces: List[InterfaceType[Ctx, _]] - def fieldsFn: () ⇒ List[Field[Ctx, Val]] + def fieldsFn: () => List[Field[Ctx, Val]] lazy val ownFields = fieldsFn().toVector - private def removeDuplicates[T, E](list: Vector[T], valueFn: T ⇒ E) = + private def removeDuplicates[T, E](list: Vector[T], valueFn: T => E) = list.foldLeft((Vector.empty, Vector.empty): (Vector[E], Vector[T])) { - case (a @ (visited, acc), e) if visited contains valueFn(e) ⇒ a - case ((visited, acc), e) ⇒ (visited :+ valueFn(e), acc :+ e) + case (a @ (visited, acc), e) if visited contains valueFn(e) => a + case ((visited, acc), e) => (visited :+ valueFn(e), acc :+ e) }._2 lazy val allInterfaces: Vector[InterfaceType[Ctx, _]] = - removeDuplicates(interfaces.toVector.flatMap(i ⇒ i +: i.allInterfaces), (i: InterfaceType[Ctx, _]) ⇒ i.name) + removeDuplicates(interfaces.toVector.flatMap(i => i +: i.allInterfaces), (i: InterfaceType[Ctx, _]) => i.name) - lazy val fields: Vector[Field[Ctx, _]] = ownFields ++ interfaces.flatMap(i ⇒ i.fields.asInstanceOf[Vector[Field[Ctx, _]]]) + lazy val fields: Vector[Field[Ctx, _]] = ownFields ++ interfaces.flatMap(i => i.fields.asInstanceOf[Vector[Field[Ctx, _]]]) - lazy val uniqueFields: Vector[Field[Ctx, _]] = removeDuplicates(fields, (e: Field[Ctx, _]) ⇒ e.name) + lazy val uniqueFields: Vector[Field[Ctx, _]] = removeDuplicates(fields, (e: Field[Ctx, _]) => e.name) lazy val fieldsByName: Map[String, Vector[Field[Ctx, _]]] = fields groupBy (_.name) @@ -180,15 +180,15 @@ sealed trait ObjectLikeType[Ctx, Val] extends OutputType[Val] with CompositeType case class ObjectType[Ctx, Val: ClassTag] ( name: String, description: Option[String], - fieldsFn: () ⇒ List[Field[Ctx, Val]], + fieldsFn: () => List[Field[Ctx, Val]], interfaces: List[InterfaceType[Ctx, _]], - instanceCheck: (Any, Class[_], ObjectType[Ctx, Val]) ⇒ Boolean, + instanceCheck: (Any, Class[_], ObjectType[Ctx, Val]) => 
Boolean, astDirectives: Vector[ast.Directive], astNodes: Vector[ast.AstNode] ) extends ObjectLikeType[Ctx, Val] { lazy val valClass = implicitly[ClassTag[Val]].runtimeClass - def withInstanceCheck(fn: (Any, Class[_], ObjectType[Ctx, Val]) ⇒ Boolean) = + def withInstanceCheck(fn: (Any, Class[_], ObjectType[Ctx, Val]) => Boolean) = copy(instanceCheck = fn) def isInstanceOf(value: Any) = instanceCheck(value, valClass, this) @@ -198,66 +198,66 @@ case class ObjectType[Ctx, Val: ClassTag] ( object ObjectType { def apply[Ctx, Val: ClassTag](name: String, fields: List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = - ObjectType(name, None, fieldsFn = () ⇒ fields, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) + ObjectType(name, None, fieldsFn = () => fields, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) def apply[Ctx, Val: ClassTag](name: String, description: String, fields: List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = - ObjectType(name, Some(description), fieldsFn = () ⇒ fields, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) + ObjectType(name, Some(description), fieldsFn = () => fields, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) def apply[Ctx, Val: ClassTag](name: String, interfaces: List[PossibleInterface[Ctx, Val]], fields: List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = - ObjectType(name, None, fieldsFn = () ⇒ fields, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) + ObjectType(name, None, fieldsFn = () => fields, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) def apply[Ctx, Val: ClassTag](name: String, description: String, interfaces: List[PossibleInterface[Ctx, Val]], fields: List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = - ObjectType(name, Some(description), fieldsFn = () ⇒ fields, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, 
Vector.empty) + ObjectType(name, Some(description), fieldsFn = () => fields, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) - def apply[Ctx, Val: ClassTag](name: String, fieldsFn: () ⇒ List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = + def apply[Ctx, Val: ClassTag](name: String, fieldsFn: () => List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = ObjectType(name, None, fieldsFn, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) - def apply[Ctx, Val: ClassTag](name: String, description: String, fieldsFn: () ⇒ List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = + def apply[Ctx, Val: ClassTag](name: String, description: String, fieldsFn: () => List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = ObjectType(name, Some(description), fieldsFn, Nil, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) - def apply[Ctx, Val: ClassTag](name: String, interfaces: List[PossibleInterface[Ctx, Val]], fieldsFn: () ⇒ List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = + def apply[Ctx, Val: ClassTag](name: String, interfaces: List[PossibleInterface[Ctx, Val]], fieldsFn: () => List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = ObjectType(name, None, fieldsFn, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) - def apply[Ctx, Val: ClassTag](name: String, description: String, interfaces: List[PossibleInterface[Ctx, Val]], fieldsFn: () ⇒ List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = + def apply[Ctx, Val: ClassTag](name: String, description: String, interfaces: List[PossibleInterface[Ctx, Val]], fieldsFn: () => List[Field[Ctx, Val]]): ObjectType[Ctx, Val] = ObjectType(name, Some(description), fieldsFn, interfaces map (_.interfaceType), instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) - def createFromMacro[Ctx, Val: ClassTag](name: String, description: Option[String], interfaces: List[InterfaceType[Ctx, _]], fieldsFn: () ⇒ List[Field[Ctx, Val]]) = + def 
createFromMacro[Ctx, Val: ClassTag](name: String, description: Option[String], interfaces: List[InterfaceType[Ctx, _]], fieldsFn: () => List[Field[Ctx, Val]]) = ObjectType(name, description, fieldsFn, interfaces, instanceCheck = defaultInstanceCheck, Vector.empty, Vector.empty) implicit def acceptUnitCtx[Ctx, Val](objectType: ObjectType[Unit, Val]): ObjectType[Ctx, Val] = objectType.asInstanceOf[ObjectType[Ctx, Val]] - def defaultInstanceCheck[Ctx, Val]: (Any, Class[_], ObjectType[Ctx, Val]) ⇒ Boolean = - (value, valClass, tpe) ⇒ valClass.isAssignableFrom(value.getClass) + def defaultInstanceCheck[Ctx, Val]: (Any, Class[_], ObjectType[Ctx, Val]) => Boolean = + (value, valClass, tpe) => valClass.isAssignableFrom(value.getClass) } case class InterfaceType[Ctx, Val]( name: String, description: Option[String] = None, - fieldsFn: () ⇒ List[Field[Ctx, Val]], + fieldsFn: () => List[Field[Ctx, Val]], interfaces: List[InterfaceType[Ctx, _]], - manualPossibleTypes: () ⇒ List[ObjectType[_, _]], + manualPossibleTypes: () => List[ObjectType[_, _]], astDirectives: Vector[ast.Directive], astNodes: Vector[ast.AstNode] = Vector.empty ) extends ObjectLikeType[Ctx, Val] with AbstractType { - def withPossibleTypes(possible: PossibleObject[Ctx, Val]*) = copy(manualPossibleTypes = () ⇒ possible.toList map (_.objectType)) - def withPossibleTypes(possible: () ⇒ List[PossibleObject[Ctx, Val]]) = copy(manualPossibleTypes = () ⇒ possible() map (_.objectType)) + def withPossibleTypes(possible: PossibleObject[Ctx, Val]*) = copy(manualPossibleTypes = () => possible.toList map (_.objectType)) + def withPossibleTypes(possible: () => List[PossibleObject[Ctx, Val]]) = copy(manualPossibleTypes = () => possible() map (_.objectType)) def rename(newName: String) = copy(name = newName).asInstanceOf[this.type] } object InterfaceType { - val emptyPossibleTypes: () ⇒ List[ObjectType[_, _]] = () ⇒ Nil + val emptyPossibleTypes: () => List[ObjectType[_, _]] = () => Nil def apply[Ctx, Val](name: String, 
fields: List[Field[Ctx, Val]]): InterfaceType[Ctx, Val] = - InterfaceType(name, None, fieldsFn = () ⇒ fields, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) + InterfaceType(name, None, fieldsFn = () => fields, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) def apply[Ctx, Val](name: String, description: String, fields: List[Field[Ctx, Val]]): InterfaceType[Ctx, Val] = - InterfaceType(name, Some(description), fieldsFn = () ⇒ fields, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) + InterfaceType(name, Some(description), fieldsFn = () => fields, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) def apply[Ctx, Val](name: String, fields: List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = - InterfaceType(name, None, fieldsFn = () ⇒ fields, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) + InterfaceType(name, None, fieldsFn = () => fields, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) def apply[Ctx, Val](name: String, description: String, fields: List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = - InterfaceType(name, Some(description), fieldsFn = () ⇒ fields, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) + InterfaceType(name, Some(description), fieldsFn = () => fields, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) - def apply[Ctx, Val](name: String, fieldsFn: () ⇒ List[Field[Ctx, Val]]): InterfaceType[Ctx, Val] = + def apply[Ctx, Val](name: String, fieldsFn: () => List[Field[Ctx, Val]]): InterfaceType[Ctx, Val] = InterfaceType(name, None, fieldsFn, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) - def apply[Ctx, Val](name: String, description: String, fieldsFn: () ⇒ List[Field[Ctx, Val]]): InterfaceType[Ctx, Val] = + def apply[Ctx, Val](name: String, description: String, fieldsFn: () => List[Field[Ctx, Val]]): 
InterfaceType[Ctx, Val] = InterfaceType(name, Some(description), fieldsFn, Nil, emptyPossibleTypes, Vector.empty, Vector.empty) - def apply[Ctx, Val](name: String, fieldsFn: () ⇒ List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = + def apply[Ctx, Val](name: String, fieldsFn: () => List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = InterfaceType(name, None, fieldsFn, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) - def apply[Ctx, Val](name: String, description: String, fieldsFn: () ⇒ List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = + def apply[Ctx, Val](name: String, description: String, fieldsFn: () => List[Field[Ctx, Val]], interfaces: List[PossibleInterface[Ctx, Val]]): InterfaceType[Ctx, Val] = InterfaceType(name, Some(description), fieldsFn, interfaces map (_.interfaceType), emptyPossibleTypes, Vector.empty, Vector.empty) } @@ -306,7 +306,7 @@ case class UnionType[Ctx]( /** * Creates a type-safe version of union type which might be useful in cases where the value is wrapped in a type like `Either`. 
*/ - def mapValue[T](func: T ⇒ Any): OutputType[T] = new UnionType[Ctx](name, description, types, astDirectives, astNodes) with MappedAbstractType[T] { + def mapValue[T](func: T => Any): OutputType[T] = new UnionType[Ctx](name, description, types, astDirectives, astNodes) with MappedAbstractType[T] { override def contraMap(value: T): Any = func(value) }.asInstanceOf[OutputType[T]] } @@ -316,15 +316,15 @@ case class Field[Ctx, Val]( fieldType: OutputType[_], description: Option[String], arguments: List[Argument[_]], - resolve: Context[Ctx, Val] ⇒ Action[Ctx, _], + resolve: Context[Ctx, Val] => Action[Ctx, _], deprecationReason: Option[String], tags: List[FieldTag], - complexity: Option[(Ctx, Args, Double) ⇒ Double], - manualPossibleTypes: () ⇒ List[ObjectType[_, _]], + complexity: Option[(Ctx, Args, Double) => Double], + manualPossibleTypes: () => List[ObjectType[_, _]], astDirectives: Vector[ast.Directive], astNodes: Vector[ast.AstNode]) extends Named with HasArguments with HasDeprecation with HasAstInfo { - def withPossibleTypes(possible: PossibleObject[Ctx, Val]*) = copy(manualPossibleTypes = () ⇒ possible.toList map (_.objectType)) - def withPossibleTypes(possible: () ⇒ List[PossibleObject[Ctx, Val]]) = copy(manualPossibleTypes = () ⇒ possible() map (_.objectType)) + def withPossibleTypes(possible: PossibleObject[Ctx, Val]*) = copy(manualPossibleTypes = () => possible.toList map (_.objectType)) + def withPossibleTypes(possible: () => List[PossibleObject[Ctx, Val]]) = copy(manualPossibleTypes = () => possible() map (_.objectType)) def rename(newName: String) = copy(name = newName).asInstanceOf[this.type] def toAst: ast.FieldDefinition = SchemaRenderer.renderField(this) } @@ -335,22 +335,22 @@ object Field { fieldType: OutputType[Out], description: Option[String] = None, arguments: List[Argument[_]] = Nil, - resolve: Context[Ctx, Val] ⇒ Action[Ctx, Res], - possibleTypes: ⇒ List[PossibleObject[_, _]] = Nil, + resolve: Context[Ctx, Val] => Action[Ctx, Res], + 
possibleTypes: => List[PossibleObject[_, _]] = Nil, tags: List[FieldTag] = Nil, - complexity: Option[(Ctx, Args, Double) ⇒ Double] = None, + complexity: Option[(Ctx, Args, Double) => Double] = None, deprecationReason: Option[String] = None)(implicit ev: ValidOutType[Res, Out]): Field[Ctx, Val] = - Field[Ctx, Val](name, fieldType, description, arguments, resolve, deprecationReason, tags, complexity, () ⇒ possibleTypes map (_.objectType), Vector.empty, Vector.empty) + Field[Ctx, Val](name, fieldType, description, arguments, resolve, deprecationReason, tags, complexity, () => possibleTypes map (_.objectType), Vector.empty, Vector.empty) def subs[Ctx, Val, StreamSource, Res, Out]( name: String, fieldType: OutputType[Out], description: Option[String] = None, arguments: List[Argument[_]] = Nil, - resolve: Context[Ctx, Val] ⇒ StreamSource, - possibleTypes: ⇒ List[PossibleObject[_, _]] = Nil, + resolve: Context[Ctx, Val] => StreamSource, + possibleTypes: => List[PossibleObject[_, _]] = Nil, tags: List[FieldTag] = Nil, - complexity: Option[(Ctx, Args, Double) ⇒ Double] = None, + complexity: Option[(Ctx, Args, Double) => Double] = None, deprecationReason: Option[String] = None )(implicit stream: SubscriptionStreamLike[StreamSource, Action, Ctx, Res, Out]): Field[Ctx, Val] = { val s = stream.subscriptionStream @@ -360,11 +360,11 @@ object Field { fieldType, description, arguments, - ctx ⇒ SubscriptionValue[Ctx, StreamSource, stream.StreamSource](resolve(ctx), s), + ctx => SubscriptionValue[Ctx, StreamSource, stream.StreamSource](resolve(ctx), s), deprecationReason, SubscriptionField[stream.StreamSource](s) +: tags, complexity, - () ⇒ possibleTypes map (_.objectType), + () => possibleTypes map (_.objectType), Vector.empty, Vector.empty) } @@ -407,13 +407,13 @@ object Argument { argumentType: InputType[T], description: String, defaultValue: Default)(implicit toInput: ToInput[Default, _], fromInput: FromInput[T], res: ArgumentType[T]): Argument[res.Res] = - Argument(name, 
argumentType, Some(description), Some(defaultValue → toInput), fromInput, Vector.empty, Vector.empty) + Argument(name, argumentType, Some(description), Some(defaultValue -> toInput), fromInput, Vector.empty, Vector.empty) def apply[T, Default]( name: String, argumentType: InputType[T], defaultValue: Default)(implicit toInput: ToInput[Default, _], fromInput: FromInput[T], res: ArgumentType[T]): Argument[res.Res] = - Argument(name, argumentType, None, Some(defaultValue → toInput), fromInput, Vector.empty, Vector.empty) + Argument(name, argumentType, None, Some(defaultValue -> toInput), fromInput, Vector.empty, Vector.empty) def apply[T]( name: String, @@ -437,7 +437,7 @@ object Argument { argumentType: InputType[T], description: Option[String], defaultValue: Default)(implicit toInput: ToInput[Default, _], fromInput: FromInput[T], res: ArgumentType[T]): Argument[res.Res] = - Argument(name, argumentType, description, Some(defaultValue → toInput), fromInput, Vector.empty, Vector.empty) + Argument(name, argumentType, description, Some(defaultValue -> toInput), fromInput, Vector.empty, Vector.empty) } trait WithoutInputTypeTags[T] { @@ -560,14 +560,14 @@ case class EnumType[T]( lazy val byValue = values groupBy (_.value) mapValues (_.head) def coerceUserInput(value: Any): Either[Violation, (T, Boolean)] = value match { - case valueName: String ⇒ byName get valueName map (v ⇒ Right(v.value → v.deprecationReason.isDefined)) getOrElse Left(EnumValueCoercionViolation(valueName, name, values.map(_.name))) - case v if byValue exists (_._1 == v) ⇒ Right(v.asInstanceOf[T] → byValue(v.asInstanceOf[T]).deprecationReason.isDefined) - case _ ⇒ Left(EnumCoercionViolation) + case valueName: String => byName get valueName map (v => Right(v.value -> v.deprecationReason.isDefined)) getOrElse Left(EnumValueCoercionViolation(valueName, name, values.map(_.name))) + case v if byValue exists (_._1 == v) => Right(v.asInstanceOf[T] -> byValue(v.asInstanceOf[T]).deprecationReason.isDefined) + 
case _ => Left(EnumCoercionViolation) } def coerceInput(value: ast.Value): Either[Violation, (T, Boolean)] = value match { - case ast.EnumValue(valueName, _, _) ⇒ byName get valueName map (v ⇒ Right(v.value → v.deprecationReason.isDefined)) getOrElse Left(EnumValueCoercionViolation(valueName, name, values.map(_.name))) - case _ ⇒ Left(EnumCoercionViolation) + case ast.EnumValue(valueName, _, _) => byName get valueName map (v => Right(v.value -> v.deprecationReason.isDefined)) getOrElse Left(EnumValueCoercionViolation(valueName, name, values.map(_.name))) + case _ => Left(EnumCoercionViolation) } def coerceOutput(value: T): String = byValue(value).name @@ -590,7 +590,7 @@ case class EnumValue[+T]( case class InputObjectType[T]( name: String, description: Option[String] = None, - fieldsFn: () ⇒ List[InputField[_]], + fieldsFn: () => List[InputField[_]], astDirectives: Vector[ast.Directive], astNodes: Vector[ast.AstNode] ) extends InputType[T @@ InputObjectResult] with NullableType with UnmodifiedType with Named with HasAstInfo { @@ -605,16 +605,16 @@ object InputObjectType { type DefaultInput = Map[String, Any] def apply[T](name: String, fields: List[InputField[_]])(implicit res: InputObjectDefaultResult[T]): InputObjectType[res.Res] = - InputObjectType(name, None, fieldsFn = () ⇒ fields, Vector.empty, Vector.empty) + InputObjectType(name, None, fieldsFn = () => fields, Vector.empty, Vector.empty) def apply[T](name: String, description: String, fields: List[InputField[_]])(implicit res: InputObjectDefaultResult[T]): InputObjectType[res.Res] = - InputObjectType(name, Some(description), fieldsFn = () ⇒ fields, Vector.empty, Vector.empty) + InputObjectType(name, Some(description), fieldsFn = () => fields, Vector.empty, Vector.empty) - def apply[T](name: String, fieldsFn: () ⇒ List[InputField[_]])(implicit res: InputObjectDefaultResult[T]): InputObjectType[res.Res] = + def apply[T](name: String, fieldsFn: () => List[InputField[_]])(implicit res: 
InputObjectDefaultResult[T]): InputObjectType[res.Res] = InputObjectType(name, None, fieldsFn, Vector.empty, Vector.empty) - def apply[T](name: String, description: String, fieldsFn: () ⇒ List[InputField[_]])(implicit res: InputObjectDefaultResult[T]): InputObjectType[res.Res] = + def apply[T](name: String, description: String, fieldsFn: () => List[InputField[_]])(implicit res: InputObjectDefaultResult[T]): InputObjectType[res.Res] = InputObjectType(name, Some(description), fieldsFn, Vector.empty, Vector.empty) - def createFromMacro[T](name: String, description: Option[String] = None, fieldsFn: () ⇒ List[InputField[_]]) = + def createFromMacro[T](name: String, description: Option[String] = None, fieldsFn: () => List[InputField[_]]) = InputObjectType[T](name, description, fieldsFn, Vector.empty, Vector.empty) } @@ -649,10 +649,10 @@ case class InputField[T]( object InputField { def apply[T, Default](name: String, fieldType: InputType[T], description: String, defaultValue: Default)(implicit toInput: ToInput[Default, _], res: WithoutInputTypeTags[T]): InputField[res.Res] = - InputField(name, fieldType, Some(description), Some(defaultValue → toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] + InputField(name, fieldType, Some(description), Some(defaultValue -> toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] def apply[T, Default](name: String, fieldType: InputType[T], defaultValue: Default)(implicit toInput: ToInput[Default, _], res: WithoutInputTypeTags[T]): InputField[res.Res] = - InputField(name, fieldType, None, Some(defaultValue → toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] + InputField(name, fieldType, None, Some(defaultValue -> toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] def apply[T](name: String, fieldType: InputType[T], description: String)(implicit res: WithoutInputTypeTags[T]): InputField[res.Res] = InputField(name, fieldType, Some(description), None, 
Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] @@ -663,7 +663,7 @@ object InputField { def createFromMacroWithDefault[T, Default]( name: String, fieldType: InputType[T], description: Option[String], defaultValue: Default )(implicit toInput: ToInput[Default, _], res: WithoutInputTypeTags[T]): InputField[res.Res] = - InputField(name, fieldType, description, Some(defaultValue → toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] + InputField(name, fieldType, description, Some(defaultValue -> toInput), Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] def createFromMacroWithoutDefault[T](name: String, fieldType: InputType[T], description: Option[String])(implicit res: WithoutInputTypeTags[T]): InputField[res.Res] = InputField(name, fieldType, description, None, Vector.empty, Vector.empty).asInstanceOf[InputField[res.Res]] @@ -701,49 +701,49 @@ object DirectiveLocation extends Enumeration { val VariableDefinition = Value def fromString(location: String): DirectiveLocation.Value = location match { - case "QUERY" ⇒ Query - case "MUTATION" ⇒ Mutation - case "SUBSCRIPTION" ⇒ Subscription - case "FIELD" ⇒ Field - case "FRAGMENT_DEFINITION" ⇒ FragmentDefinition - case "FRAGMENT_SPREAD" ⇒ FragmentSpread - case "INLINE_FRAGMENT" ⇒ InlineFragment - case "VARIABLE_DEFINITION" ⇒ VariableDefinition - - case "SCHEMA" ⇒ Schema - case "SCALAR" ⇒ Scalar - case "OBJECT" ⇒ Object - case "FIELD_DEFINITION" ⇒ FieldDefinition - case "ARGUMENT_DEFINITION" ⇒ ArgumentDefinition - case "INTERFACE" ⇒ Interface - case "UNION" ⇒ Union - case "ENUM" ⇒ Enum - case "ENUM_VALUE" ⇒ EnumValue - case "INPUT_OBJECT" ⇒ InputObject - case "INPUT_FIELD_DEFINITION" ⇒ InputFieldDefinition + case "QUERY" => Query + case "MUTATION" => Mutation + case "SUBSCRIPTION" => Subscription + case "FIELD" => Field + case "FRAGMENT_DEFINITION" => FragmentDefinition + case "FRAGMENT_SPREAD" => FragmentSpread + case "INLINE_FRAGMENT" => InlineFragment + case 
"VARIABLE_DEFINITION" => VariableDefinition + + case "SCHEMA" => Schema + case "SCALAR" => Scalar + case "OBJECT" => Object + case "FIELD_DEFINITION" => FieldDefinition + case "ARGUMENT_DEFINITION" => ArgumentDefinition + case "INTERFACE" => Interface + case "UNION" => Union + case "ENUM" => Enum + case "ENUM_VALUE" => EnumValue + case "INPUT_OBJECT" => InputObject + case "INPUT_FIELD_DEFINITION" => InputFieldDefinition } def toSpecString(location: DirectiveLocation.Value): String = location match { - case Query ⇒ "QUERY" - case Mutation ⇒ "MUTATION" - case Subscription ⇒ "SUBSCRIPTION" - case Field ⇒ "FIELD" - case FragmentDefinition ⇒ "FRAGMENT_DEFINITION" - case FragmentSpread ⇒ "FRAGMENT_SPREAD" - case InlineFragment ⇒ "INLINE_FRAGMENT" - case VariableDefinition ⇒ "VARIABLE_DEFINITION" - - case Schema ⇒ "SCHEMA" - case Scalar ⇒ "SCALAR" - case Object ⇒ "OBJECT" - case FieldDefinition ⇒ "FIELD_DEFINITION" - case ArgumentDefinition ⇒ "ARGUMENT_DEFINITION" - case Interface ⇒ "INTERFACE" - case Union ⇒ "UNION" - case Enum ⇒ "ENUM" - case EnumValue ⇒ "ENUM_VALUE" - case InputObject ⇒ "INPUT_OBJECT" - case InputFieldDefinition ⇒ "INPUT_FIELD_DEFINITION" + case Query => "QUERY" + case Mutation => "MUTATION" + case Subscription => "SUBSCRIPTION" + case Field => "FIELD" + case FragmentDefinition => "FRAGMENT_DEFINITION" + case FragmentSpread => "FRAGMENT_SPREAD" + case InlineFragment => "INLINE_FRAGMENT" + case VariableDefinition => "VARIABLE_DEFINITION" + + case Schema => "SCHEMA" + case Scalar => "SCALAR" + case Object => "OBJECT" + case FieldDefinition => "FIELD_DEFINITION" + case ArgumentDefinition => "ARGUMENT_DEFINITION" + case Interface => "INTERFACE" + case Union => "UNION" + case Enum => "ENUM" + case EnumValue => "ENUM_VALUE" + case InputObject => "INPUT_OBJECT" + case InputFieldDefinition => "INPUT_FIELD_DEFINITION" } } @@ -752,7 +752,7 @@ case class Directive( description: Option[String] = None, arguments: List[Argument[_]] = Nil, locations: 
Set[DirectiveLocation.Value] = Set.empty, - shouldInclude: DirectiveContext ⇒ Boolean = _ ⇒ true) extends HasArguments with Named { + shouldInclude: DirectiveContext => Boolean = _ => true) extends HasArguments with Named { def rename(newName: String) = copy(name = newName).asInstanceOf[this.type] def toAst: ast.DirectiveDefinition = SchemaRenderer.renderDirective(this) } @@ -787,17 +787,17 @@ case class Schema[Ctx, Val]( val sameSangriaType = t1.getClass.getName == t2.getClass.getName (t1, t2) match { - case (ot1: ObjectType[_, _], ot2: ObjectType[_, _]) ⇒ sameSangriaType && (ot1.valClass == ot2.valClass) - case _ ⇒ sameSangriaType + case (ot1: ObjectType[_, _], ot2: ObjectType[_, _]) => sameSangriaType && (ot1.valClass == ot2.valClass) + case _ => sameSangriaType } } def typeConflict(name: String, t1: Type, t2: Type, parentInfo: String) = (t1, t2) match { - case (ot1: ObjectType[_, _], ot2: ObjectType[_, _]) ⇒ + case (ot1: ObjectType[_, _], ot2: ObjectType[_, _]) => throw SchemaValidationException(Vector(ConflictingObjectTypeCaseClassViolation(name, parentInfo))) - case _ ⇒ + case _ => val conflictingTypes = List(t1, t2).map(_.getClass.getSimpleName) throw SchemaValidationException(Vector(ConflictingTypeDefinitionViolation( @@ -806,110 +806,110 @@ case class Schema[Ctx, Val]( def updated(priority: Int, name: String, tpe: Type with Named, result: Map[String, (Int, Type with Named)], parentInfo: String) = result get name match { - case Some(found) if !sameType(found._2, tpe) ⇒ typeConflict(name, found._2, tpe, parentInfo) - case Some(_) ⇒ result - case None ⇒ result.updated(name, priority → tpe) + case Some(found) if !sameType(found._2, tpe) => typeConflict(name, found._2, tpe, parentInfo) + case Some(_) => result + case None => result.updated(name, priority -> tpe) } def collectTypes(parentInfo: String, priority: Int, tpe: Type, result: Map[String, (Int, Type with Named)]): Map[String, (Int, Type with Named)] = { tpe match { - case null ⇒ + case null => throw new 
IllegalStateException( s"A `null` value was provided instead of type for $parentInfo.\n" + "This can happen if you have recursive type definition or circular references within your type graph.\n" + "Please use no-arg function to provide fields for such types.\n" + "You can find more info in the docs: http://sangria-graphql.org/learn/#circular-references-and-recursive-types") - case t: Named if result contains t.name ⇒ + case t: Named if result contains t.name => result get t.name match { - case Some(found) if !sameType(found._2, t) && t.isInstanceOf[ScalarAlias[_, _]] && found._2.isInstanceOf[ScalarType[_]] ⇒ result - case Some(found) if !sameType(found._2, t) ⇒ typeConflict(t.name, found._2, t, parentInfo) - case _ ⇒ result + case Some(found) if !sameType(found._2, t) && t.isInstanceOf[ScalarAlias[_, _]] && found._2.isInstanceOf[ScalarType[_]] => result + case Some(found) if !sameType(found._2, t) => typeConflict(t.name, found._2, t, parentInfo) + case _ => result } - case OptionType(ofType) ⇒ collectTypes(parentInfo, priority, ofType, result) - case OptionInputType(ofType) ⇒ collectTypes(parentInfo, priority, ofType, result) - case ListType(ofType) ⇒ collectTypes(parentInfo, priority, ofType, result) - case ListInputType(ofType) ⇒ collectTypes(parentInfo, priority, ofType, result) - - case t @ ScalarType(name, _, _, _, _, _, _, _, _) if BuiltinScalars.contains(t) ⇒ updated(40, name, t, result, parentInfo) - case t @ ScalarType(name, _, _, _, _, _, _, _, _) ⇒ updated(priority, name, t, result, parentInfo) - case ScalarAlias(aliasFor, _, _) ⇒ updated(priority, aliasFor.name, aliasFor, result, parentInfo) - case t @ EnumType(name, _, _, _, _) ⇒ updated(priority, name, t, result, parentInfo) - case t @ InputObjectType(name, _, _, _, _) ⇒ + case OptionType(ofType) => collectTypes(parentInfo, priority, ofType, result) + case OptionInputType(ofType) => collectTypes(parentInfo, priority, ofType, result) + case ListType(ofType) => collectTypes(parentInfo, priority, 
ofType, result) + case ListInputType(ofType) => collectTypes(parentInfo, priority, ofType, result) + + case t @ ScalarType(name, _, _, _, _, _, _, _, _) if BuiltinScalars.contains(t) => updated(40, name, t, result, parentInfo) + case t @ ScalarType(name, _, _, _, _, _, _, _, _) => updated(priority, name, t, result, parentInfo) + case ScalarAlias(aliasFor, _, _) => updated(priority, aliasFor.name, aliasFor, result, parentInfo) + case t @ EnumType(name, _, _, _, _) => updated(priority, name, t, result, parentInfo) + case t @ InputObjectType(name, _, _, _, _) => t.fields.foldLeft(updated(priority, name, t, result, parentInfo)) { - case (acc, field) ⇒ + case (acc, field) => collectTypes(s"a field '${field.name}' of '$name' input object type", priority, field.fieldType, acc) } - case t: ObjectLikeType[_, _] ⇒ + case t: ObjectLikeType[_, _] => val own = t.fields.foldLeft(updated(priority, t.name, t, result, parentInfo)) { - case (acc, field) ⇒ + case (acc, field) => val fromArgs = field.arguments.foldLeft(collectTypes(s"a field '${field.name}' of '${t.name}' type", priority, field.fieldType, acc)) { - case (aacc, arg) ⇒ collectTypes(s"an argument '${arg.name}' defined in field '${field.name}' of '${t.name}' type", priority, arg.argumentType, aacc) + case (aacc, arg) => collectTypes(s"an argument '${arg.name}' defined in field '${field.name}' of '${t.name}' type", priority, arg.argumentType, aacc) } field.manualPossibleTypes().foldLeft(fromArgs) { - case (acc, objectType) ⇒ collectTypes(s"a manualPossibleType defined in '${t.name}' type", priority, objectType, acc) + case (acc, objectType) => collectTypes(s"a manualPossibleType defined in '${t.name}' type", priority, objectType, acc) } } val withPossible = t match { - case i: InterfaceType[_, _] ⇒ + case i: InterfaceType[_, _] => i.manualPossibleTypes().foldLeft(own) { - case (acc, objectType) ⇒ collectTypes(s"a manualPossibleType defined in '${i.name}' type", priority, objectType, acc) + case (acc, objectType) => 
collectTypes(s"a manualPossibleType defined in '${i.name}' type", priority, objectType, acc) } - case _ ⇒ own + case _ => own } t.interfaces.foldLeft(withPossible) { - case (acc, interface) ⇒ collectTypes(s"an interface defined in '${t.name}' type", priority, interface, acc) + case (acc, interface) => collectTypes(s"an interface defined in '${t.name}' type", priority, interface, acc) } - case t @ UnionType(name, _, types, _, _) ⇒ - types.foldLeft(updated(priority, name, t, result, parentInfo)) {case (acc, tpe) ⇒ collectTypes(s"a '$name' type", priority, tpe, acc)} + case t @ UnionType(name, _, types, _, _) => + types.foldLeft(updated(priority, name, t, result, parentInfo)) {case (acc, tpe) => collectTypes(s"a '$name' type", priority, tpe, acc)} } } val schemaTypes = collectTypes("a '__Schema' type", 30, introspection.__Schema, Map.empty) val queryTypes = collectTypes("a query type", 20, query, schemaTypes) - val queryTypesWithAdditions = additionalTypes.foldLeft(queryTypes){case (acc, tpe) ⇒ collectTypes("additional type", 10, tpe, acc)} + val queryTypesWithAdditions = additionalTypes.foldLeft(queryTypes){case (acc, tpe) => collectTypes("additional type", 10, tpe, acc)} val queryAndSubTypes = mutation map (collectTypes("a mutation type", 10, _, queryTypesWithAdditions)) getOrElse queryTypesWithAdditions val queryAndSubAndMutTypes = subscription map (collectTypes("a subscription type", 10, _, queryAndSubTypes)) getOrElse queryAndSubTypes queryAndSubAndMutTypes } - lazy val typeList: Vector[Type with Named] = types.values.toVector.sortBy(t ⇒ t._1 + t._2.name).map(_._2) + lazy val typeList: Vector[Type with Named] = types.values.toVector.sortBy(t => t._1 + t._2.name).map(_._2) lazy val availableTypeNames: Vector[String] = typeList map (_.name) - lazy val allTypes: Map[String, Type with Named] = types collect {case (name, (_, tpe)) ⇒ name → tpe} - lazy val inputTypes = types collect {case (name, (_, tpe: InputType[_])) ⇒ name → tpe} - lazy val outputTypes = types 
collect {case (name, (_, tpe: OutputType[_])) ⇒ name → tpe} - lazy val scalarTypes = types collect {case (name, (_, tpe: ScalarType[_])) ⇒ name → tpe} + lazy val allTypes: Map[String, Type with Named] = types collect {case (name, (_, tpe)) => name -> tpe} + lazy val inputTypes = types collect {case (name, (_, tpe: InputType[_])) => name -> tpe} + lazy val outputTypes = types collect {case (name, (_, tpe: OutputType[_])) => name -> tpe} + lazy val scalarTypes = types collect {case (name, (_, tpe: ScalarType[_])) => name -> tpe} lazy val unionTypes: Map[String, UnionType[_]] = types.filter(_._2._2.isInstanceOf[UnionType[_]]).mapValues(_._2.asInstanceOf[UnionType[_]]).toMap lazy val directivesByName = directives groupBy (_.name) mapValues (_.head) def getInputType(tpe: ast.Type): Option[InputType[_]] = tpe match { - case ast.NamedType(name, _) ⇒ inputTypes get name map (OptionInputType(_)) - case ast.NotNullType(ofType, _) ⇒ getInputType(ofType) collect {case OptionInputType(ot) ⇒ ot} - case ast.ListType(ofType, _) ⇒ getInputType(ofType) map (t ⇒ OptionInputType(ListInputType(t))) + case ast.NamedType(name, _) => inputTypes get name map (OptionInputType(_)) + case ast.NotNullType(ofType, _) => getInputType(ofType) collect {case OptionInputType(ot) => ot} + case ast.ListType(ofType, _) => getInputType(ofType) map (t => OptionInputType(ListInputType(t))) } def getInputType(tpe: IntrospectionTypeRef): Option[InputType[_]] = tpe match { - case IntrospectionNamedTypeRef(_, name) ⇒ inputTypes get name map (OptionInputType(_)) - case IntrospectionNonNullTypeRef(ofType) ⇒ getInputType(ofType) collect {case OptionInputType(ot) ⇒ ot} - case IntrospectionListTypeRef(ofType) ⇒ getInputType(ofType) map (t ⇒ OptionInputType(ListInputType(t))) + case IntrospectionNamedTypeRef(_, name) => inputTypes get name map (OptionInputType(_)) + case IntrospectionNonNullTypeRef(ofType) => getInputType(ofType) collect {case OptionInputType(ot) => ot} + case IntrospectionListTypeRef(ofType) => 
getInputType(ofType) map (t => OptionInputType(ListInputType(t))) } def getOutputType(tpe: ast.Type, topLevel: Boolean = false): Option[OutputType[_]] = tpe match { - case ast.NamedType(name, _) ⇒ outputTypes get name map (ot ⇒ if (topLevel) ot else OptionType(ot)) - case ast.NotNullType(ofType, _) ⇒ getOutputType(ofType) collect {case OptionType(ot) ⇒ ot} - case ast.ListType(ofType, _) ⇒ getOutputType(ofType) map (ListType(_)) + case ast.NamedType(name, _) => outputTypes get name map (ot => if (topLevel) ot else OptionType(ot)) + case ast.NotNullType(ofType, _) => getOutputType(ofType) collect {case OptionType(ot) => ot} + case ast.ListType(ofType, _) => getOutputType(ofType) map (ListType(_)) } lazy val directImplementations: Map[String, Vector[ObjectLikeType[_, _]]] = { typeList - .collect{case objectLike: ObjectLikeType[_, _] ⇒ objectLike} - .flatMap(objectLike ⇒ objectLike.interfaces map (_.name → objectLike)) + .collect{case objectLike: ObjectLikeType[_, _] => objectLike} + .flatMap(objectLike => objectLike.interfaces map (_.name -> objectLike)) .groupBy(_._1) .mapValues(_ map (_._2)) .toMap @@ -917,17 +917,17 @@ case class Schema[Ctx, Val]( lazy val implementations: Map[String, Vector[ObjectType[_, _]]] = { def findConcreteTypes(tpe: ObjectLikeType[_, _]): Vector[ObjectType[_, _]] = tpe match { - case obj: ObjectType[_, _] ⇒ Vector(obj) - case interface: InterfaceType[_, _] ⇒ directImplementations(interface.name) flatMap findConcreteTypes + case obj: ObjectType[_, _] => Vector(obj) + case interface: InterfaceType[_, _] => directImplementations(interface.name) flatMap findConcreteTypes } directImplementations map { - case (name, directImpls) ⇒ name → directImpls.flatMap(findConcreteTypes).groupBy(_.name).map(_._2.head).toVector + case (name, directImpls) => name -> directImpls.flatMap(findConcreteTypes).groupBy(_.name).map(_._2.head).toVector } } lazy val possibleTypes: Map[String, Vector[ObjectType[_, _]]] = - implementations ++ unionTypes.values.map(ut ⇒ 
ut.name → ut.types.toVector) + implementations ++ unionTypes.values.map(ut => ut.name -> ut.types.toVector) def isPossibleType(baseTypeName: String, tpe: ObjectType[_, _]) = possibleTypes get baseTypeName exists (_ exists (_.name == tpe.name)) diff --git a/src/main/scala/sangria/schema/SchemaComparator.scala b/src/main/scala/sangria/schema/SchemaComparator.scala index 9036057f..c64f608b 100644 --- a/src/main/scala/sangria/schema/SchemaComparator.scala +++ b/src/main/scala/sangria/schema/SchemaComparator.scala @@ -16,13 +16,13 @@ object SchemaComparator { val oldTypes = oldSchema.availableTypeNames.toSet val newTypes = newSchema.availableTypeNames.toSet - val removed = oldTypes.diff(newTypes).toVector.map(name ⇒ + val removed = oldTypes.diff(newTypes).toVector.map(name => SchemaChange.TypeRemoved(oldSchema.types(name)._2)) - val added = newTypes.diff(oldTypes).toVector.map(name ⇒ + val added = newTypes.diff(oldTypes).toVector.map(name => SchemaChange.TypeAdded(newSchema.types(name)._2)) - val changed = oldTypes.intersect(newTypes).flatMap { name ⇒ + val changed = oldTypes.intersect(newTypes).flatMap { name => val oldType = oldSchema.types(name)._2 val newType = newSchema.types(name)._2 @@ -41,13 +41,13 @@ object SchemaComparator { val oldDirs = oldSchema.directives.map(_.name).toSet val newDirs = newSchema.directives.map(_.name).toSet - val removed = oldDirs.diff(newDirs).toVector.map(name ⇒ + val removed = oldDirs.diff(newDirs).toVector.map(name => SchemaChange.DirectiveRemoved(oldSchema.directivesByName(name))) - val added = newDirs.diff(oldDirs).toVector.map(name ⇒ + val added = newDirs.diff(oldDirs).toVector.map(name => SchemaChange.DirectiveAdded(newSchema.directivesByName(name))) - val changed = oldDirs.intersect(newDirs).flatMap { name ⇒ + val changed = oldDirs.intersect(newDirs).flatMap { name => val oldDir = oldSchema.directivesByName(name) val newDir = newSchema.directivesByName(name) @@ -76,10 +76,10 @@ object SchemaComparator { val oldLocs = 
oldDir.locations val newLocs = newDir.locations - val removed = oldLocs.diff(newLocs).toVector.map(loc ⇒ + val removed = oldLocs.diff(newLocs).toVector.map(loc => SchemaChange.DirectiveLocationRemoved(oldDir, loc)) - val added = newLocs.diff(oldLocs).toVector.map(loc ⇒ + val added = newLocs.diff(oldLocs).toVector.map(loc => SchemaChange.DirectiveLocationAdded(newDir, loc)) removed ++ added @@ -116,13 +116,13 @@ object SchemaComparator { def findChangesInTypes(oldType: Type with Named, newType: Type with Named): Vector[SchemaChange] = { val typeChanges = (oldType, newType) match { - case (o: EnumType[_], n: EnumType[_]) ⇒ findInEnumTypes(o, n) - case (o: UnionType[_], n: UnionType[_]) ⇒ findInUnionTypes(o, n) - case (o: ScalarType[_], n: ScalarType[_]) ⇒ findInScalarTypes(o, n) - case (o: InputObjectType[_], n: InputObjectType[_]) ⇒ findInInputObjectTypes(o, n) - case (o: ObjectType[_, _], n: ObjectType[_, _]) ⇒ findInObjectTypes(o, n) - case (o: InterfaceType[_, _], n: InterfaceType[_, _]) ⇒ findInInterfaceTypes(o, n) - case _ ⇒ Vector.empty + case (o: EnumType[_], n: EnumType[_]) => findInEnumTypes(o, n) + case (o: UnionType[_], n: UnionType[_]) => findInUnionTypes(o, n) + case (o: ScalarType[_], n: ScalarType[_]) => findInScalarTypes(o, n) + case (o: InputObjectType[_], n: InputObjectType[_]) => findInInputObjectTypes(o, n) + case (o: ObjectType[_, _], n: ObjectType[_, _]) => findInObjectTypes(o, n) + case (o: InterfaceType[_, _], n: InterfaceType[_, _]) => findInInterfaceTypes(o, n) + case _ => Vector.empty } typeChanges ++ findDescriptionChanged(oldType, newType, SchemaChange.TypeDescriptionChanged(newType, _, _)) @@ -132,10 +132,10 @@ object SchemaComparator { val oldTypes = oldType.types.map(_.name).toSet val newTypes = newType.types.map(_.name).toSet - val removed = oldTypes.diff(newTypes).toVector.map(name ⇒ + val removed = oldTypes.diff(newTypes).toVector.map(name => SchemaChange.UnionMemberRemoved(oldType, oldType.types.find(_.name == name).get)) - val 
added = newTypes.diff(oldTypes).toVector.map(name ⇒ + val added = newTypes.diff(oldTypes).toVector.map(name => SchemaChange.UnionMemberAdded(newType, newType.types.find(_.name == name).get)) val directiveChanges = findInAstDirs(oldType.astDirectives, newType.astDirectives, @@ -154,13 +154,13 @@ object SchemaComparator { val oldValues = oldType.values.map(_.name).toSet val newValues = newType.values.map(_.name).toSet - val removed = oldValues.diff(newValues).toVector.map(name ⇒ + val removed = oldValues.diff(newValues).toVector.map(name => SchemaChange.EnumValueRemoved(oldType, oldType.byName(name))) - val added = newValues.diff(oldValues).toVector.map(name ⇒ + val added = newValues.diff(oldValues).toVector.map(name => SchemaChange.EnumValueAdded(newType, newType.byName(name))) - val changed = oldValues.intersect(newValues).flatMap { name ⇒ + val changed = oldValues.intersect(newValues).flatMap { name => val oldValue = oldType.byName(name) val newValue = newType.byName(name) @@ -184,16 +184,16 @@ object SchemaComparator { val oldFields = oldType.fields.map(_.name).toSet val newFields = newType.fields.map(_.name).toSet - val removed = oldFields.diff(newFields).toVector.map(name ⇒ + val removed = oldFields.diff(newFields).toVector.map(name => SchemaChange.InputFieldRemoved(oldType, oldType.fieldsByName(name))) - val added = newFields.diff(oldFields).toVector.map { name ⇒ + val added = newFields.diff(oldFields).toVector.map { name => val field = newType.fieldsByName(name) SchemaChange.InputFieldAdded(newType, field, !isOptional(field)) } - val changed = oldFields.intersect(newFields).flatMap { name ⇒ + val changed = oldFields.intersect(newFields).flatMap { name => val oldField = oldType.fieldsByName(name) val newField = newType.fieldsByName(name) @@ -228,10 +228,10 @@ object SchemaComparator { val oldInts = oldType.allInterfaces.map(_.name).toSet val newInts = newType.allInterfaces.map(_.name).toSet - val removed = oldInts.diff(newInts).toVector.map(name ⇒ + val 
removed = oldInts.diff(newInts).toVector.map(name => SchemaChange.ObjectTypeInterfaceRemoved(oldType, oldType.interfaces.find(_.name == name).get)) - val added = newInts.diff(oldInts).toVector.map(name ⇒ + val added = newInts.diff(oldInts).toVector.map(name => SchemaChange.ObjectTypeInterfaceAdded(newType, newType.interfaces.find(_.name == name).get)) removed ++ added @@ -241,13 +241,13 @@ object SchemaComparator { val oldFields = oldType.fields.map(_.name).toSet val newFields = newType.fields.map(_.name).toSet - val removed = oldFields.diff(newFields).toVector.map(name ⇒ + val removed = oldFields.diff(newFields).toVector.map(name => SchemaChange.FieldRemoved(oldType, oldType.fieldsByName(name).head)) - val added = newFields.diff(oldFields).toVector.map(name ⇒ + val added = newFields.diff(oldFields).toVector.map(name => SchemaChange.FieldAdded(newType, newType.fieldsByName(name).head)) - val changed = oldFields.intersect(newFields).flatMap { name ⇒ + val changed = oldFields.intersect(newFields).flatMap { name => val oldField = oldType.fieldsByName(name).head val newField = newType.fieldsByName(name).head @@ -289,27 +289,27 @@ object SchemaComparator { private def findInArgs( oldArgs: List[Argument[_]], newArgs: List[Argument[_]], - added: (Argument[_], Boolean) ⇒ SchemaChange, - removed: Argument[_] ⇒ SchemaChange, - description: (Argument[_], Option[String], Option[String]) ⇒ SchemaChange, - default: (Argument[_], Option[ast.Value], Option[ast.Value]) ⇒ SchemaChange, - typeChange: (Argument[_], Boolean, InputType[_], InputType[_]) ⇒ SchemaChange, - dirAdded: (Argument[_], ast.Directive) ⇒ SchemaChange, - dirRemoved: (Argument[_], ast.Directive) ⇒ SchemaChange + added: (Argument[_], Boolean) => SchemaChange, + removed: Argument[_] => SchemaChange, + description: (Argument[_], Option[String], Option[String]) => SchemaChange, + default: (Argument[_], Option[ast.Value], Option[ast.Value]) => SchemaChange, + typeChange: (Argument[_], Boolean, InputType[_], 
InputType[_]) => SchemaChange, + dirAdded: (Argument[_], ast.Directive) => SchemaChange, + dirRemoved: (Argument[_], ast.Directive) => SchemaChange ): Vector[SchemaChange] = { val oldA = oldArgs.map(_.name).toSet val newA = newArgs.map(_.name).toSet - val remove = oldA.diff(newA).toVector.map(name ⇒ + val remove = oldA.diff(newA).toVector.map(name => removed(oldArgs.find(_.name == name).get)) - val add = newA.diff(oldA).toVector.map { name ⇒ + val add = newA.diff(oldA).toVector.map { name => val arg = newArgs.find(_.name == name).get added(arg, !isOptional(arg)) } - val changed = oldA.intersect(newA).flatMap { name ⇒ + val changed = oldA.intersect(newA).flatMap { name => val oldArg = oldArgs.find(_.name == name).get val newArg = newArgs.find(_.name == name).get @@ -323,8 +323,8 @@ object SchemaComparator { private def findInAstDirs( oldDirectives: Vector[ast.Directive], newDirectives: Vector[ast.Directive], - added: (ast.Directive) ⇒ SchemaChange, - removed: (ast.Directive) ⇒ SchemaChange + added: (ast.Directive) => SchemaChange, + removed: (ast.Directive) => SchemaChange ): Vector[SchemaChange] = { val oldD = oldDirectives.map(AstNode.withoutAstLocations(_)).toSet val newD = newDirectives.map(AstNode.withoutAstLocations(_)).toSet @@ -338,13 +338,13 @@ object SchemaComparator { private def findInArg( oldArg: Argument[_], newArg: Argument[_], - default: (Option[ast.Value], Option[ast.Value]) ⇒ SchemaChange, - typeChange: (Boolean, InputType[_], InputType[_]) ⇒ SchemaChange, - dirAdded: ast.Directive ⇒ SchemaChange, - dirRemoved: ast.Directive ⇒ SchemaChange + default: (Option[ast.Value], Option[ast.Value]) => SchemaChange, + typeChange: (Boolean, InputType[_], InputType[_]) => SchemaChange, + dirAdded: ast.Directive => SchemaChange, + dirRemoved: ast.Directive => SchemaChange ): Vector[SchemaChange] = { - val oldDefault = oldArg.defaultValue.flatMap(dv ⇒ DefaultValueRenderer.renderInputValue(dv, oldArg.argumentType, coercionHelper).map(v ⇒ 
AstNode.withoutAstLocations(v))) - val newDefault = newArg.defaultValue.flatMap(dv ⇒ DefaultValueRenderer.renderInputValue(dv, newArg.argumentType, coercionHelper).map(v ⇒ AstNode.withoutAstLocations(v))) + val oldDefault = oldArg.defaultValue.flatMap(dv => DefaultValueRenderer.renderInputValue(dv, oldArg.argumentType, coercionHelper).map(v => AstNode.withoutAstLocations(v))) + val newDefault = newArg.defaultValue.flatMap(dv => DefaultValueRenderer.renderInputValue(dv, newArg.argumentType, coercionHelper).map(v => AstNode.withoutAstLocations(v))) val withDefault = if (oldDefault != newDefault) @@ -369,8 +369,8 @@ object SchemaComparator { } private def findInInputFields(oldType: InputObjectType[_], newType: InputObjectType[_], oldField: InputField[_], newField: InputField[_]): Vector[SchemaChange] = { - val oldDefault = oldField.defaultValue.flatMap(dv ⇒ DefaultValueRenderer.renderInputValue(dv, oldField.fieldType, coercionHelper).map(v ⇒ AstNode.withoutAstLocations(v))) - val newDefault = newField.defaultValue.flatMap(dv ⇒ DefaultValueRenderer.renderInputValue(dv, newField.fieldType, coercionHelper).map(v ⇒ AstNode.withoutAstLocations(v))) + val oldDefault = oldField.defaultValue.flatMap(dv => DefaultValueRenderer.renderInputValue(dv, oldField.fieldType, coercionHelper).map(v => AstNode.withoutAstLocations(v))) + val newDefault = newField.defaultValue.flatMap(dv => DefaultValueRenderer.renderInputValue(dv, newField.fieldType, coercionHelper).map(v => AstNode.withoutAstLocations(v))) val withDefault = if (oldDefault != newDefault) @@ -396,35 +396,35 @@ object SchemaComparator { } private def isOptional(field: InputField[_]) = field.fieldType match { - case _: OptionInputType[_] ⇒ true - case _ ⇒ false + case _: OptionInputType[_] => true + case _ => false } private def isOptional(argument: Argument[_]) = argument.argumentType match { - case _: OptionInputType[_] ⇒ true - case _ ⇒ false + case _: OptionInputType[_] => true + case _ => false } private def 
nonContainer(field: InputField[_]) = field.fieldType match { - case OptionInputType(ofType) ⇒ ofType - case tpe ⇒ tpe + case OptionInputType(ofType) => ofType + case tpe => tpe } private def nonContainer(field: Field[_, _]) = field.fieldType match { - case OptionType(ofType) ⇒ ofType - case tpe ⇒ tpe + case OptionType(ofType) => ofType + case tpe => tpe } private def nonContainer(argument: Argument[_]) = argument.argumentType match { - case OptionInputType(ofType) ⇒ ofType - case tpe ⇒ tpe + case OptionInputType(ofType) => ofType + case tpe => tpe } - private def findDescriptionChanged(o: HasDescription, n: HasDescription, fn: (Option[String], Option[String]) ⇒ SchemaChange): Vector[SchemaChange] = + private def findDescriptionChanged(o: HasDescription, n: HasDescription, fn: (Option[String], Option[String]) => SchemaChange): Vector[SchemaChange] = if (o.description != n.description) Vector(fn(o.description, n.description)) else Vector.empty - private def findDeprecationChanged(o: HasDeprecation, n: HasDeprecation, fn: (Option[String], Option[String]) ⇒ SchemaChange): Vector[SchemaChange] = + private def findDeprecationChanged(o: HasDeprecation, n: HasDeprecation, fn: (Option[String], Option[String]) => SchemaChange): Vector[SchemaChange] = if (o.deprecationReason != n.deprecationReason) Vector(fn(o.deprecationReason, n.deprecationReason)) else Vector.empty @@ -539,13 +539,13 @@ object SchemaChange { extends AbstractChange(s"Field `${field.name}` was deprecated in `${tpe.name}` type", false) with DeprecationChange case class InputFieldDefaultChanged(tpe: InputObjectType[_], field: InputField[_], oldDefault: Option[ast.Value], newDefault: Option[ast.Value]) - extends AbstractChange(s"`${tpe.name}.${field.name}` default value changed from ${oldDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")}", false) with TypeChange + extends AbstractChange(s"`${tpe.name}.${field.name}` default value changed from 
${oldDefault.fold("none")(d => "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d => "`" + d.renderCompact + "`")}", false) with TypeChange case class ObjectTypeArgumentDefaultChanged(tpe: ObjectLikeType[_, _], field: Field[_, _], argument: Argument[_], oldDefault: Option[ast.Value], newDefault: Option[ast.Value]) - extends AbstractChange(s"`${tpe.name}.${field.name}(${argument.name})` default value changed from ${oldDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")}", false, true) with TypeChange + extends AbstractChange(s"`${tpe.name}.${field.name}(${argument.name})` default value changed from ${oldDefault.fold("none")(d => "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d => "`" + d.renderCompact + "`")}", false, true) with TypeChange case class DirectiveArgumentDefaultChanged(directive: Directive, argument: Argument[_], oldDefault: Option[ast.Value], newDefault: Option[ast.Value]) - extends AbstractChange(s"`${directive.name}(${argument.name})` default value changed from ${oldDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d ⇒ "`" + d.renderCompact + "`")}", false, true) + extends AbstractChange(s"`${directive.name}(${argument.name})` default value changed from ${oldDefault.fold("none")(d => "`" + d.renderCompact + "`")} to ${newDefault.fold("none")(d => "`" + d.renderCompact + "`")}", false, true) case class ObjectTypeInterfaceAdded(tpe: ObjectType[_, _], interface: InterfaceType[_, _]) extends AbstractChange(s"`${tpe.name}` object type now implements `${interface.name}` interface", false, true) with TypeChange @@ -672,22 +672,22 @@ object SchemaChange { extends AbstractChange(s"`${tpe.name}.${field.name}` field type changed from `${SchemaRenderer.renderTypeName(oldFiledType)}` to `${SchemaRenderer.renderTypeName(newFieldType)}`", breaking) with TypeChange case class SchemaMutationTypeChanged(oldType: Option[ObjectType[_, _]], newType: 
Option[ObjectType[_, _]]) - extends AbstractChange(s"Schema mutation type changed from ${oldType.fold("none")(t ⇒ "`" + t.name + "`")} to ${newType.fold("none")(t ⇒ "`" + t.name + "`")} type", oldType.nonEmpty) + extends AbstractChange(s"Schema mutation type changed from ${oldType.fold("none")(t => "`" + t.name + "`")} to ${newType.fold("none")(t => "`" + t.name + "`")} type", oldType.nonEmpty) case class SchemaSubscriptionTypeChanged(oldType: Option[ObjectType[_, _]], newType: Option[ObjectType[_, _]]) - extends AbstractChange(s"Schema subscription type changed from ${oldType.fold("none")(t ⇒ "`" + t.name + "`")} to ${newType.fold("none")(t ⇒ "`" + t.name + "`")} type", oldType.nonEmpty) + extends AbstractChange(s"Schema subscription type changed from ${oldType.fold("none")(t => "`" + t.name + "`")} to ${newType.fold("none")(t => "`" + t.name + "`")} type", oldType.nonEmpty) private val AnArticleLetters = Set('a', 'e', 'i', 'o') private def kind(tpe: Type) = tpe match { - case _: ObjectType[_, _] ⇒ "Object" - case _: InterfaceType[_, _] ⇒ "Interface" - case _: ScalarType[_] ⇒ "Scalar" - case _: ScalarAlias[_, _] ⇒ "Scalar" - case _: UnionType[_] ⇒ "Union" - case _: EnumType[_] ⇒ "Enum" - case _: InputObjectType[_] ⇒ "InputObject" - case t ⇒ throw new IllegalStateException(s"Unsupported type kind: $t") + case _: ObjectType[_, _] => "Object" + case _: InterfaceType[_, _] => "Interface" + case _: ScalarType[_] => "Scalar" + case _: ScalarAlias[_, _] => "Scalar" + case _: UnionType[_] => "Union" + case _: EnumType[_] => "Enum" + case _: InputObjectType[_] => "InputObject" + case t => throw new IllegalStateException(s"Unsupported type kind: $t") } private def article(word: String) = diff --git a/src/main/scala/sangria/schema/SchemaValidationRule.scala b/src/main/scala/sangria/schema/SchemaValidationRule.scala index 4151f7cf..345efc63 100644 --- a/src/main/scala/sangria/schema/SchemaValidationRule.scala +++ b/src/main/scala/sangria/schema/SchemaValidationRule.scala @@ 
-47,29 +47,29 @@ object DefaultValuesValidationRule extends SchemaValidationRule { def validate[Ctx, Val](schema: Schema[Ctx, Val]) = { val coercionHelper = ValueCoercionHelper.default - def validate(prefix: ⇒ String, path: List[String], tpe: InputType[_])(defaultValue: (_, ToInput[_, _])) = { + def validate(prefix: => String, path: List[String], tpe: InputType[_])(defaultValue: (_, ToInput[_, _])) = { val (default, toInput) = defaultValue.asInstanceOf[(Any, ToInput[Any, Any])] val (inputValue, iu) = toInput.toInput(default) coercionHelper.coerceInputValue(tpe, path, inputValue, None, None, CoercedScalaResultMarshaller.default, CoercedScalaResultMarshaller.default, false, prefix)(iu) match { - case Left(violations) ⇒ violations - case Right(violations) ⇒ Nil + case Left(violations) => violations + case Right(violations) => Nil } } val inputTypeViolations = schema.inputTypes.values.toList flatMap { - case it: InputObjectType[_] ⇒ - it.fields flatMap (f ⇒ + case it: InputObjectType[_] => + it.fields flatMap (f => f.defaultValue map validate(s"Invalid default value of field '${f.name}' in input type '${it.name}'. ", it.name :: f.name :: Nil, f.inputValueType) getOrElse Nil) - case _ ⇒ Nil + case _ => Nil } val outputTypeViolations = schema.outputTypes.values.toList flatMap { - case ot: ObjectLikeType[_, _] ⇒ - ot.fields flatMap (f ⇒ - f.arguments flatMap (a ⇒ + case ot: ObjectLikeType[_, _] => + ot.fields flatMap (f => + f.arguments flatMap (a => a.defaultValue map validate(s"Invalid default value of argument '${a.name}' in field '${f.name}' defined in output type '${ot.name}'. 
", ot.name :: f.name :: ("[" + a.name + "]") :: Nil, a.inputValueType) getOrElse Nil)) - case _ ⇒ Nil + case _ => Nil } inputTypeViolations ++ outputTypeViolations @@ -80,15 +80,15 @@ object InterfaceImplementationValidationRule extends SchemaValidationRule { private def validateObjectType[Ctx, Val](schema: Schema[Ctx, Val], objTpe: ObjectType[_, _], intTpe: InterfaceType[_, _]): Vector[Violation] = { val objFields: Map[String, Vector[Field[_, _]]] = objTpe.ownFields.groupBy(_.name) - intTpe.ownFields.flatMap { intField ⇒ + intTpe.ownFields.flatMap { intField => objFields.get(intField.name) match { - case None ⇒ + case None => // we allow object type to inherit fields from the interfaces // without explicitly defining them, but only when it is not // defined though SDL. Vector.empty - case Some(objField) if !TypeComparators.isSubType(schema, objField.head.fieldType, intField.fieldType) ⇒ + case Some(objField) if !TypeComparators.isSubType(schema, objField.head.fieldType, intField.fieldType) => Vector(InvalidImplementationFieldTypeViolation( intTpe.name, objTpe.name, @@ -98,15 +98,15 @@ object InterfaceImplementationValidationRule extends SchemaValidationRule { SchemaElementValidator.sourceMapper(schema), SchemaElementValidator.location(objField.head) ++ SchemaElementValidator.location(intField))) - case Some(objField) ⇒ - val intArgViolations = intField.arguments.flatMap { iarg ⇒ + case Some(objField) => + val intArgViolations = intField.arguments.flatMap { iarg => objField.head.arguments.find(_.name == iarg.name) match { - case None ⇒ + case None => Vector(MissingImplementationFieldArgumentViolation(intTpe.name, objTpe.name, intField.name, iarg.name, SchemaElementValidator.sourceMapper(schema), SchemaElementValidator.location(iarg) ++ SchemaElementValidator.location(objField.head))) - case Some(oarg) if !TypeComparators.isEqualType(iarg.argumentType, oarg.argumentType) ⇒ + case Some(oarg) if !TypeComparators.isEqualType(iarg.argumentType, oarg.argumentType) => 
Vector(InvalidImplementationFieldArgumentTypeViolation( intTpe.name, objTpe.name, @@ -117,14 +117,14 @@ object InterfaceImplementationValidationRule extends SchemaValidationRule { SchemaElementValidator.sourceMapper(schema), SchemaElementValidator.location(iarg) ++ SchemaElementValidator.location(oarg))) - case _ ⇒ Nil + case _ => Nil } } val objArgViolations = objField.head.arguments - .filterNot(oa ⇒ intField.arguments.exists(_.name == oa.name)) + .filterNot(oa => intField.arguments.exists(_.name == oa.name)) .flatMap { - case oarg if !oarg.argumentType.isInstanceOf[OptionInputType[_]] ⇒ + case oarg if !oarg.argumentType.isInstanceOf[OptionInputType[_]] => Vector(ImplementationExtraFieldArgumentNotOptionalViolation( intTpe.name, objTpe.name, @@ -133,7 +133,7 @@ object InterfaceImplementationValidationRule extends SchemaValidationRule { SchemaRenderer.renderTypeName(oarg.argumentType), SchemaElementValidator.sourceMapper(schema), SchemaElementValidator.location(oarg) ++ SchemaElementValidator.location(intField))) - case _ ⇒ Nil + case _ => Nil } intArgViolations ++ objArgViolations @@ -143,10 +143,10 @@ object InterfaceImplementationValidationRule extends SchemaValidationRule { def validate[Ctx, Val](schema: Schema[Ctx, Val]) = schema.possibleTypes.toList.flatMap { - case (intName, objTypes) ⇒ + case (intName, objTypes) => schema.outputTypes(intName) match { - case intTpe: InterfaceType[_, _] ⇒ objTypes.flatMap(validateObjectType(schema, _, intTpe)) - case _ ⇒ Nil + case intTpe: InterfaceType[_, _] => objTypes.flatMap(validateObjectType(schema, _, intTpe)) + case _ => Nil } } } @@ -156,30 +156,30 @@ object SubscriptionFieldsValidationRule extends SchemaValidationRule { val subsName = schema.subscription.map(_.name) def subscriptionTag(tag: FieldTag) = tag match { - case SubscriptionField(_) ⇒ true - case _ ⇒ false + case SubscriptionField(_) => true + case _ => false } val otherViolations = schema.typeList.flatMap { - case obj: ObjectLikeType[_, _] if 
subsName.isDefined && subsName.get != obj.name ⇒ - obj.uniqueFields.filter(_.tags exists subscriptionTag).map(f ⇒ + case obj: ObjectLikeType[_, _] if subsName.isDefined && subsName.get != obj.name => + obj.uniqueFields.filter(_.tags exists subscriptionTag).map(f => InvalidSubscriptionFieldViolation(obj.name, f.name)) - case _ ⇒ Nil + case _ => Nil } - val subsViolations = schema.subscription.fold(List.empty[Violation]) { subsType ⇒ + val subsViolations = schema.subscription.fold(List.empty[Violation]) { subsType => val fields = subsType.uniqueFields - val nonSubscription = fields.filter(f ⇒ !f.tags.exists(subscriptionTag)) + val nonSubscription = fields.filter(f => !f.tags.exists(subscriptionTag)) if (nonSubscription.size == fields.size) { Nil } else if (nonSubscription.isEmpty) { if (fields.isEmpty) Nil else { - val first = fields.head.tags.collectFirst{case SubscriptionField(s) ⇒ s}.get + val first = fields.head.tags.collectFirst{case SubscriptionField(s) => s}.get - val differentFields = fields.tail.filter(f ⇒ f.tags.collectFirst{case SubscriptionField(s) if !first.supported(s.asInstanceOf[SubscriptionStream[({type T[X]})#T]]) ⇒ s}.nonEmpty) + val differentFields = fields.tail.filter(f => f.tags.collectFirst{case SubscriptionField(s) if !first.supported(s.asInstanceOf[SubscriptionStream[({type T[X]})#T]]) => s}.nonEmpty) if (differentFields.nonEmpty) List(NotAllSubscriptionFieldsHaveSameStreamViolation(subsType.name, differentFields.map(_.name))) @@ -309,10 +309,10 @@ object ContainerMembersValidator extends SchemaElementValidator { val nonUnique = tpe.types.groupBy(_.name).toVector.collect { - case (memberName, dup) if dup.size > 1 ⇒ + case (memberName, dup) if dup.size > 1 => val astMembers = tpe.astNodes.collect { - case astUnion: UnionTypeDefinition ⇒ astUnion.types - case astUnion: UnionTypeExtensionDefinition ⇒ astUnion.types + case astUnion: UnionTypeDefinition => astUnion.types + case astUnion: UnionTypeExtensionDefinition => astUnion.types } val 
locations = astMembers.flatten.filter(_.name == memberName).flatMap(_.location).toList @@ -330,7 +330,7 @@ object ContainerMembersValidator extends SchemaElementValidator { val nonUnique = tpe.values.groupBy(_.name).toVector.collect { - case (valueName, dup) if dup.size > 1 ⇒ + case (valueName, dup) if dup.size > 1 => NonUniqueEnumValuesViolation(tpe.name, valueName, sourceMapper(schema), dup.flatMap(location)) } @@ -345,7 +345,7 @@ object ContainerMembersValidator extends SchemaElementValidator { val nonUnique = tpe.fields.groupBy(_.name).toVector.collect { - case (fieldName, dup) if dup.size > 1 ⇒ + case (fieldName, dup) if dup.size > 1 => NonUniqueInputFieldsViolation(tpe.name, fieldName, sourceMapper(schema), dup.flatMap(location)) } @@ -357,10 +357,10 @@ object ContainerMembersValidator extends SchemaElementValidator { val nonUnique = tpe.interfaces.groupBy(_.name).toVector.collect { - case (intName, dup) if dup.size > 1 ⇒ + case (intName, dup) if dup.size > 1 => val astMembers = tpe.astNodes.collect { - case astUnion: ObjectTypeDefinition ⇒ astUnion.interfaces - case astUnion: ObjectTypeExtensionDefinition ⇒ astUnion.interfaces + case astUnion: ObjectTypeDefinition => astUnion.interfaces + case astUnion: ObjectTypeExtensionDefinition => astUnion.interfaces } val locations = astMembers.flatten.filter(_.name == intName).flatMap(_.location).toList @@ -381,7 +381,7 @@ object ContainerMembersValidator extends SchemaElementValidator { val nonUnique = tpe.ownFields.groupBy(_.name).toVector.collect { - case (fieldName, dup) if dup.size > 1 ⇒ + case (fieldName, dup) if dup.size > 1 => NonUniqueFieldsViolation(kind, tpe.name, fieldName, sourceMapper(schema), dup.flatMap(location).toList) } @@ -390,13 +390,13 @@ object ContainerMembersValidator extends SchemaElementValidator { override def validateField(schema: Schema[_, _], tpe: ObjectLikeType[_, _], field: Field[_, _]) = field.arguments.groupBy(_.name).toVector.collect { - case (argName, dup) if dup.size > 1 ⇒ + case 
(argName, dup) if dup.size > 1 => NonUniqueFieldArgumentsViolation(tpe.name, field.name, argName, sourceMapper(schema), dup.flatMap(location)) } override def validateDirective(schema: Schema[_, _], tpe: Directive) = tpe.arguments.groupBy(_.name).toVector.collect { - case (argName, dup) if dup.size > 1 ⇒ + case (argName, dup) if dup.size > 1 => NonUniqueDirectiveArgumentsViolation(tpe.name, argName, sourceMapper(schema), dup.flatMap(location)) } } @@ -435,7 +435,7 @@ trait SchemaElementValidator { object SchemaElementValidator { def sourceMapper(schema: Schema[_, _]): Option[SourceMapper] = - schema.astNodes.collectFirst{case doc: Document ⇒ doc.sourceMapper}.flatten + schema.astNodes.collectFirst{case doc: Document => doc.sourceMapper}.flatten def location(elem: HasAstInfo): List[AstLocation] = elem.astNodes.flatMap(_.location).toList @@ -450,41 +450,41 @@ class FullSchemaTraversalValidationRule(validators: SchemaElementValidator*) ext def add(vs: Vector[Violation]): Unit = if (vs.nonEmpty) violations ++= vs - def validate(fn: SchemaElementValidator ⇒ Vector[Violation]) = - validators.foreach(v ⇒ add(fn(v))) + def validate(fn: SchemaElementValidator => Vector[Violation]) = + validators.foreach(v => add(fn(v))) schema.typeList.foreach { - case tpe: EnumType[_] ⇒ + case tpe: EnumType[_] => validate(_.validateEnumType(schema, tpe)) - tpe.values.foreach(v ⇒ validate(_.validateEnumValue(schema, tpe, v))) - case tpe: ScalarType[_] ⇒ + tpe.values.foreach(v => validate(_.validateEnumValue(schema, tpe, v))) + case tpe: ScalarType[_] => validate(_.validateScalarType(schema, tpe)) - case tpe: UnionType[_] ⇒ + case tpe: UnionType[_] => validate(_.validateUnionType(schema, tpe)) - case tpe: InputObjectType[_] ⇒ + case tpe: InputObjectType[_] => validate(_.validateInputObjectType(schema, tpe)) - tpe.fields.foreach(f ⇒ validate(_.validateInputField(schema, tpe, f))) - case tpe: ObjectType[_, _] ⇒ + tpe.fields.foreach(f => validate(_.validateInputField(schema, tpe, f))) + case 
tpe: ObjectType[_, _] => validate(_.validateObjectType(schema, tpe)) - tpe.fields.foreach { f ⇒ + tpe.fields.foreach { f => validate(_.validateField(schema, tpe, f)) - f.arguments.foreach(a ⇒ validate(_.validateFieldArgument(schema, tpe, f, a))) + f.arguments.foreach(a => validate(_.validateFieldArgument(schema, tpe, f, a))) } - case tpe: InterfaceType[_, _] ⇒ + case tpe: InterfaceType[_, _] => validate(_.validateInterfaceType(schema, tpe)) - tpe.fields.foreach { f ⇒ + tpe.fields.foreach { f => validate(_.validateField(schema, tpe, f)) - f.arguments.foreach(a ⇒ validate(_.validateFieldArgument(schema, tpe, f, a))) + f.arguments.foreach(a => validate(_.validateFieldArgument(schema, tpe, f, a))) } - case _ ⇒ // everything is fine + case _ => // everything is fine } - schema.directives.foreach { d ⇒ + schema.directives.foreach { d => validate(_.validateDirective(schema, d)) - d.arguments.foreach(a ⇒ validate(_.validateDirectiveArgument(schema, d, a))) + d.arguments.foreach(a => validate(_.validateDirectiveArgument(schema, d, a))) } violations.result().toList diff --git a/src/main/scala/sangria/schema/package.scala b/src/main/scala/sangria/schema/package.scala index 6580ab5e..f5a8db40 100644 --- a/src/main/scala/sangria/schema/package.scala +++ b/src/main/scala/sangria/schema/package.scala @@ -12,19 +12,19 @@ package object schema { "Int can represent values between -(2^31) and 2^31 - 1."), coerceOutput = valueOutput, coerceUserInput = { - case i: Int ⇒ Right(i) - case i: Long if i.isValidInt ⇒ Right(i.toInt) - case i: BigInt if !i.isValidInt ⇒ Left(BigIntCoercionViolation) - case i: BigInt ⇒ Right(i.intValue) - case d: Double if d.isValidInt ⇒ Right(d.intValue) - case d: BigDecimal if d.isValidInt ⇒ Right(d.intValue) - case _ ⇒ Left(IntCoercionViolation) + case i: Int => Right(i) + case i: Long if i.isValidInt => Right(i.toInt) + case i: BigInt if !i.isValidInt => Left(BigIntCoercionViolation) + case i: BigInt => Right(i.intValue) + case d: Double if d.isValidInt => 
Right(d.intValue) + case d: BigDecimal if d.isValidInt => Right(d.intValue) + case _ => Left(IntCoercionViolation) }, coerceInput = { - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) if !i.isValidInt ⇒ Left(BigIntCoercionViolation) - case ast.BigIntValue(i, _, _) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) if !i.isValidInt => Left(BigIntCoercionViolation) + case ast.BigIntValue(i, _, _) => Right(i.intValue) + case _ => Left(IntCoercionViolation) }) implicit val LongType = ScalarType[Long]("Long", @@ -33,19 +33,19 @@ package object schema { "Long can represent values between -(2^63) and 2^63 - 1."), coerceOutput = valueOutput, coerceUserInput = { - case i: Int ⇒ Right(i: Long) - case i: Long ⇒ Right(i) - case i: BigInt if !i.isValidLong ⇒ Left(BigLongCoercionViolation) - case i: BigInt ⇒ Right(i.longValue) - case d: Double if d.isWhole ⇒ Right(d.toLong) - case d: BigDecimal if d.isValidLong ⇒ Right(d.longValue) - case _ ⇒ Left(LongCoercionViolation) + case i: Int => Right(i: Long) + case i: Long => Right(i) + case i: BigInt if !i.isValidLong => Left(BigLongCoercionViolation) + case i: BigInt => Right(i.longValue) + case d: Double if d.isWhole => Right(d.toLong) + case d: BigDecimal if d.isValidLong => Right(d.longValue) + case _ => Left(LongCoercionViolation) }, coerceInput = { - case ast.IntValue(i, _, _) ⇒ Right(i: Long) - case ast.BigIntValue(i, _, _) if !i.isValidLong ⇒ Left(BigLongCoercionViolation) - case ast.BigIntValue(i, _, _) ⇒ Right(i.longValue) - case _ ⇒ Left(LongCoercionViolation) + case ast.IntValue(i, _, _) => Right(i: Long) + case ast.BigIntValue(i, _, _) if !i.isValidLong => Left(BigLongCoercionViolation) + case ast.BigIntValue(i, _, _) => Right(i.longValue) + case _ => Left(LongCoercionViolation) }) implicit val BigIntType = ScalarType[BigInt]("BigInt", @@ -54,24 +54,24 @@ package object schema { "BigInt can represent arbitrary big 
values."), coerceOutput = valueOutput, coerceUserInput = { - case i: Int ⇒ Right(BigInt(i)) - case i: Long ⇒ Right(BigInt(i)) - case i: BigInt ⇒ Right(i) - case d: Double if d.isWhole ⇒ Right(BigInt(d.toLong)) - case d: BigDecimal if d.isWhole ⇒ Right(d.toBigInt) - case _ ⇒ Left(IntCoercionViolation) + case i: Int => Right(BigInt(i)) + case i: Long => Right(BigInt(i)) + case i: BigInt => Right(i) + case d: Double if d.isWhole => Right(BigInt(d.toLong)) + case d: BigDecimal if d.isWhole => Right(d.toBigInt) + case _ => Left(IntCoercionViolation) }, coerceInput = { - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) ⇒ Right(i) - case _ ⇒ Left(IntCoercionViolation) + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) => Right(i) + case _ => Left(IntCoercionViolation) }) implicit val FloatType = ScalarType[Double]("Float", description = Some( "The `Float` scalar type represents signed double-precision fractional " + "values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point)."), - coerceOutput = (v, _) ⇒ { + coerceOutput = (v, _) => { // .isNaN and .isInfinity box, we explicitly avoid that here if (java.lang.Double.isNaN(v) || java.lang.Double.isInfinite(v)) null @@ -79,54 +79,54 @@ package object schema { v }, coerceUserInput = { - case i: Int ⇒ Right(i.toDouble) - case i: Long ⇒ Right(i.toDouble) - case i: BigInt if !i.isValidDouble ⇒ Left(BigDecimalCoercionViolation) - case i: BigInt ⇒ Right(i.doubleValue) - case d: Double ⇒ Right(d) - case d: BigDecimal if !d.isDecimalDouble ⇒ Left(BigDecimalCoercionViolation) - case d: BigDecimal ⇒ Right(d.doubleValue) - case _ ⇒ Left(FloatCoercionViolation) + case i: Int => Right(i.toDouble) + case i: Long => Right(i.toDouble) + case i: BigInt if !i.isValidDouble => Left(BigDecimalCoercionViolation) + case i: BigInt => Right(i.doubleValue) + case d: Double => Right(d) + case d: BigDecimal if !d.isDecimalDouble => Left(BigDecimalCoercionViolation) + case d: 
BigDecimal => Right(d.doubleValue) + case _ => Left(FloatCoercionViolation) }, coerceInput = { - case ast.FloatValue(d, _, _) ⇒ Right(d) - case ast.BigDecimalValue(d, _, _) if !d.isDecimalDouble ⇒ Left(BigDecimalCoercionViolation) - case ast.BigDecimalValue(d, _, _) ⇒ Right(d.doubleValue) - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) if !i.isValidDouble ⇒ Left(BigDecimalCoercionViolation) - case ast.BigIntValue(i, _, _) ⇒ Right(i.doubleValue) - case _ ⇒ Left(FloatCoercionViolation) + case ast.FloatValue(d, _, _) => Right(d) + case ast.BigDecimalValue(d, _, _) if !d.isDecimalDouble => Left(BigDecimalCoercionViolation) + case ast.BigDecimalValue(d, _, _) => Right(d.doubleValue) + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) if !i.isValidDouble => Left(BigDecimalCoercionViolation) + case ast.BigIntValue(i, _, _) => Right(i.doubleValue) + case _ => Left(FloatCoercionViolation) }) implicit val BigDecimalType = ScalarType[BigDecimal]("BigDecimal", description = Some("The `BigDecimal` scalar type represents signed fractional values with arbitrary precision."), coerceOutput = valueOutput, coerceUserInput = { - case i: Int ⇒ Right(BigDecimal(i)) - case i: Long ⇒ Right(BigDecimal(i)) - case i: BigInt ⇒ Right(BigDecimal(i)) - case d: Double ⇒ Right(BigDecimal(d)) - case d: BigDecimal ⇒ Right(d) - case _ ⇒ Left(FloatCoercionViolation) + case i: Int => Right(BigDecimal(i)) + case i: Long => Right(BigDecimal(i)) + case i: BigInt => Right(BigDecimal(i)) + case d: Double => Right(BigDecimal(d)) + case d: BigDecimal => Right(d) + case _ => Left(FloatCoercionViolation) }, coerceInput = { - case ast.BigDecimalValue(d, _, _) ⇒ Right(d) - case ast.FloatValue(d, _, _) ⇒ Right(BigDecimal(d)) - case ast.IntValue(i, _, _) ⇒ Right(BigDecimal(i)) - case ast.BigIntValue(i, _, _) ⇒ Right(BigDecimal(i)) - case _ ⇒ Left(FloatCoercionViolation) + case ast.BigDecimalValue(d, _, _) => Right(d) + case ast.FloatValue(d, _, _) => Right(BigDecimal(d)) 
+ case ast.IntValue(i, _, _) => Right(BigDecimal(i)) + case ast.BigIntValue(i, _, _) => Right(BigDecimal(i)) + case _ => Left(FloatCoercionViolation) }) implicit val BooleanType = ScalarType[Boolean]("Boolean", description = Some("The `Boolean` scalar type represents `true` or `false`."), coerceOutput = valueOutput, coerceUserInput = { - case b: Boolean ⇒ Right(b) - case _ ⇒ Left(BooleanCoercionViolation) + case b: Boolean => Right(b) + case _ => Left(BooleanCoercionViolation) }, coerceInput = { - case ast.BooleanValue(b, _, _) ⇒ Right(b) - case _ ⇒ Left(BooleanCoercionViolation) + case ast.BooleanValue(b, _, _) => Right(b) + case _ => Left(BooleanCoercionViolation) }) implicit val StringType = ScalarType[String]("String", @@ -136,12 +136,12 @@ package object schema { "represent free-form human-readable text."), coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case s: String => Right(s) + case _ => Left(StringCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case ast.StringValue(s, _, _, _, _) => Right(s) + case _ => Left(StringCoercionViolation) }) val IDType = ScalarType[String]("ID", @@ -153,17 +153,17 @@ package object schema { "(such as `4`) input value will be accepted as an ID."), coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case i: Int ⇒ Right(i.toString) - case i: Long ⇒ Right(i.toString) - case i: BigInt ⇒ Right(i.toString) - case _ ⇒ Left(IDCoercionViolation) + case s: String => Right(s) + case i: Int => Right(i.toString) + case i: Long => Right(i.toString) + case i: BigInt => Right(i.toString) + case _ => Left(IDCoercionViolation) }, coerceInput = { - case ast.StringValue(id, _, _, _, _) ⇒ Right(id) - case ast.IntValue(id, _, _) ⇒ Right(id.toString) - case ast.BigIntValue(id, _, _) ⇒ Right(id.toString) - case _ ⇒ Left(IDCoercionViolation) + case ast.StringValue(id, _, _, _, 
_) => Right(id) + case ast.IntValue(id, _, _) => Right(id.toString) + case ast.BigIntValue(id, _, _) => Right(id.toString) + case _ => Left(IDCoercionViolation) }) val BuiltinGraphQLScalars: List[ScalarType[_]] = @@ -193,7 +193,7 @@ package object schema { // if we don't know if we should include it, then we should include it: // ValueCollector will fail before we get here if values must be known, such as when preparing or executing a query, // but for e.g. running a QueryReducer without known variables, we must be conservative - shouldInclude = ctx ⇒ ctx.args.argOpt(IfArg).getOrElse(true)) + shouldInclude = ctx => ctx.args.argOpt(IfArg).getOrElse(true)) val SkipDirective = Directive("skip", description = Some("Directs the executor to skip this field or fragment when the `if` argument is true."), @@ -202,7 +202,7 @@ package object schema { // if we don't know if we should include it, then we should include it: // ValueCollector will fail before we get here if values must be known, such as when preparing or executing a query, // but for e.g. 
running a QueryReducer without known variables, we must be conservative - shouldInclude = ctx ⇒ !ctx.args.argOpt(IfArg).getOrElse(false)) + shouldInclude = ctx => !ctx.args.argOpt(IfArg).getOrElse(false)) val DefaultDeprecationReason = "No longer supported" @@ -217,7 +217,7 @@ package object schema { description = Some("Marks an element of a GraphQL schema as no longer supported."), arguments = ReasonArg :: Nil, locations = Set(DirectiveLocation.FieldDefinition, DirectiveLocation.EnumValue), - shouldInclude = ctx ⇒ !ctx.arg(IfArg)) + shouldInclude = ctx => !ctx.arg(IfArg)) val BuiltinDirectives = IncludeDirective :: SkipDirective :: DeprecatedDirective :: Nil diff --git a/src/main/scala/sangria/util/Cache.scala b/src/main/scala/sangria/util/Cache.scala index 28da2be5..a0f63235 100644 --- a/src/main/scala/sangria/util/Cache.scala +++ b/src/main/scala/sangria/util/Cache.scala @@ -8,20 +8,20 @@ trait Cache[Key, Value] { def contains(key: Key): Boolean def apply(key: Key): Value def get(key: Key): Option[Value] - def getOrElse(key: Key, default: ⇒ Value): Value + def getOrElse(key: Key, default: => Value): Value def update(key: Key, value: Value): Unit def remove(key: Key): Unit def clear(): Unit // NOTE: that `getOrElseUpdate` allows a race condition between value retrieval and cache update. 
// It is an explicit decision to avoid any kind of synchronization (it is preferred to recompute value multiple times than to synchronize) - def getOrElseUpdate(key: Key, fn: ⇒ Value): Value - def find(fn: (Key, Value) ⇒ Boolean): Option[(Key, Value)] - def mapToSet[R](fn: (Key, Value) ⇒ R): Set[R] - def mapValues[R](fn: Value ⇒ R): Map[Key, R] - def keyExists(fn: Key ⇒ Boolean): Boolean - def forEachValue(fn: Value ⇒ Unit): Unit - def removeKeys(fn: Key ⇒ Boolean): Unit + def getOrElseUpdate(key: Key, fn: => Value): Value + def find(fn: (Key, Value) => Boolean): Option[(Key, Value)] + def mapToSet[R](fn: (Key, Value) => R): Set[R] + def mapValues[R](fn: Value => R): Map[Key, R] + def keyExists(fn: Key => Boolean): Boolean + def forEachValue(fn: Value => Unit): Unit + def removeKeys(fn: Key => Boolean): Unit } object Cache { @@ -32,7 +32,7 @@ object Cache { def apply[Key, Value](elems: (Key, Value)*) = { val c = empty[Key, Value] - elems.foreach {case (key, value) ⇒ c(key) = value} + elems.foreach {case (key, value) => c(key) = value} c } } diff --git a/src/main/scala/sangria/util/ConcurrentHashMapCache.scala b/src/main/scala/sangria/util/ConcurrentHashMapCache.scala index 7f725d58..2a3cd248 100644 --- a/src/main/scala/sangria/util/ConcurrentHashMapCache.scala +++ b/src/main/scala/sangria/util/ConcurrentHashMapCache.scala @@ -10,30 +10,30 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { def contains(key: Key) = cache.containsKey(key) def apply(key: Key) = cache.get(key) match { - case null ⇒ throw new NoSuchElementException - case v ⇒ v + case null => throw new NoSuchElementException + case v => v } def get(key: Key) = Option(cache.get(key)) - def getOrElse(key: Key, default: ⇒ Value) = cache.get(key) match { - case null ⇒ default - case v ⇒ v + def getOrElse(key: Key, default: => Value) = cache.get(key) match { + case null => default + case v => v } def update(key: Key, value: Value) = cache.put(key, value) def remove(key: Key) = 
cache.remove(key) def clear() = cache.clear() - def getOrElseUpdate(key: Key, fn: ⇒ Value) = cache.get(key) match { - case null ⇒ + def getOrElseUpdate(key: Key, fn: => Value) = cache.get(key) match { + case null => val res = fn cache.put(key, res) res - case v ⇒ v + case v => v } - def find(fn: (Key, Value) ⇒ Boolean) = { + def find(fn: (Key, Value) => Boolean) = { val it = cache.entrySet().iterator() var res: Option[(Key, Value)] = None @@ -41,13 +41,13 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { val elem = it.next() if (fn(elem.getKey, elem.getValue)) - res = Some(elem.getKey → elem.getValue) + res = Some(elem.getKey -> elem.getValue) } res } - def mapToSet[R](fn: (Key, Value) ⇒ R) = { + def mapToSet[R](fn: (Key, Value) => R) = { val it = cache.entrySet().iterator() val res = scala.collection.mutable.Set[R]() @@ -60,7 +60,7 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { res } - def mapValues[R](fn: Value ⇒ R) = { + def mapValues[R](fn: Value => R) = { val it = cache.entrySet().iterator() val res = scala.collection.mutable.Map[Key, R]() @@ -73,7 +73,7 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { res } - def keyExists(fn: Key ⇒ Boolean): Boolean = { + def keyExists(fn: Key => Boolean): Boolean = { val it = cache.entrySet().iterator() while (it.hasNext) { @@ -85,7 +85,7 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { false } - def forEachValue(fn: Value ⇒ Unit) = { + def forEachValue(fn: Value => Unit) = { val it = cache.values().iterator() while (it.hasNext) { @@ -95,7 +95,7 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { } } - def removeKeys(fn: Key ⇒ Boolean) = { + def removeKeys(fn: Key => Boolean) = { val it = cache.keySet().iterator() while (it.hasNext) { @@ -108,8 +108,8 @@ class ConcurrentHashMapCache[Key, Value] extends Cache[Key, Value] { def canEqual(other: Any): Boolean = other.isInstanceOf[ConcurrentHashMapCache[_, _]] 
override def equals(other: Any): Boolean = other match { - case that: ConcurrentHashMapCache[_, _] ⇒ (that canEqual this) && cache == that.cache - case _ ⇒ false + case that: ConcurrentHashMapCache[_, _] => (that canEqual this) && cache == that.cache + case _ => false } override def hashCode(): Int = diff --git a/src/main/scala/sangria/util/StringUtil.scala b/src/main/scala/sangria/util/StringUtil.scala index 391203a9..4704a39a 100644 --- a/src/main/scala/sangria/util/StringUtil.scala +++ b/src/main/scala/sangria/util/StringUtil.scala @@ -36,8 +36,8 @@ object StringUtil { val inputThreshold = input.length / 2 options - .map (opt ⇒ opt → lexicalDistance(input, opt)) - .filter (opt ⇒ opt._2 <= math.max(math.max(inputThreshold, opt._1.length / 2), 1)) + .map (opt => opt -> lexicalDistance(input, opt)) + .filter (opt => opt._2 <= math.max(math.max(inputThreshold, opt._1.length / 2), 1)) .sortBy (_._2) .map (_._1) } @@ -55,13 +55,13 @@ object StringUtil { * @return distance in number of edits */ def lexicalDistance(a: String, b: String): Int = { - val d = for (i ← 0 to a.length) yield ListBuffer.fill(b.length + 1)(i) + val d = for (i <- 0 to a.length) yield ListBuffer.fill(b.length + 1)(i) - for (j ← 1 to b.length) { + for (j <- 1 to b.length) { d(0)(j) = j } - for (i ← 1 to a.length; j ← 1 to b.length) { + for (i <- 1 to a.length; j <- 1 to b.length) { val cost = if (a(i - 1) == b(j - 1)) 0 else 1 d(i)(j) = math.min(math.min(d(i - 1)(j) + 1, d(i)(j - 1) + 1), d(i - 1)(j - 1) + cost) @@ -78,22 +78,22 @@ object StringUtil { def escapeString(str: String) = str flatMap { - case ch if ch > 0xfff ⇒ "\\u" + charHex(ch) - case ch if ch > 0xff ⇒ "\\u0" + charHex(ch) - case ch if ch > 0x7f ⇒ "\\u00" + charHex(ch) - case ch if ch < 32 ⇒ + case ch if ch > 0xfff => "\\u" + charHex(ch) + case ch if ch > 0xff => "\\u0" + charHex(ch) + case ch if ch > 0x7f => "\\u00" + charHex(ch) + case ch if ch < 32 => ch match { - case '\b' ⇒ "\\b" - case '\n' ⇒ "\\n" - case '\t' ⇒ "\\t" - case 
'\f' ⇒ "\\f" - case '\r' ⇒ "\\r" - case ch if ch > 0xf ⇒ "\\u00" + charHex(ch) - case ch ⇒ "\\u000" + charHex(ch) + case '\b' => "\\b" + case '\n' => "\\n" + case '\t' => "\\t" + case '\f' => "\\f" + case '\r' => "\\r" + case ch if ch > 0xf => "\\u00" + charHex(ch) + case ch => "\\u000" + charHex(ch) } - case '"' ⇒ "\\\"" - case '\\' ⇒ "\\\\" - case ch ⇒ ch.toString + case '"' => "\\\"" + case '\\' => "\\\\" + case ch => ch.toString } def charHex(ch: Char): String = @@ -107,8 +107,8 @@ object StringUtil { */ def blockStringValue(rawString: String): String = { val lines = rawString.split("""\r\n|[\n\r]""") - val lineSizes = lines.map(l ⇒ l → leadingWhitespace(l)) - val commonIndentLines = lineSizes.drop(1).collect {case (line, size) if size != line.length ⇒ size} + val lineSizes = lines.map(l => l -> leadingWhitespace(l)) + val commonIndentLines = lineSizes.drop(1).collect {case (line, size) if size != line.length => size} val strippedLines = if (commonIndentLines.nonEmpty) { val commonIndent = commonIndentLines.min diff --git a/src/main/scala/sangria/util/TrieMapCache.scala b/src/main/scala/sangria/util/TrieMapCache.scala index 92b4d23e..53a5b54f 100644 --- a/src/main/scala/sangria/util/TrieMapCache.scala +++ b/src/main/scala/sangria/util/TrieMapCache.scala @@ -10,24 +10,24 @@ class TrieMapCache[Key, Value] extends Cache[Key, Value] { def contains(key: Key) = cache.contains(key) def apply(key: Key) = cache(key) def get(key: Key) = cache.get(key) - def getOrElse(key: Key, default: ⇒ Value) = cache.getOrElse(key, default) + def getOrElse(key: Key, default: => Value) = cache.getOrElse(key, default) def update(key: Key, value: Value) = cache.update(key, value) def remove(key: Key) = cache.remove(key) def clear() = cache.clear() - def getOrElseUpdate(key: Key, fn: ⇒ Value) = cache.getOrElseUpdate(key, fn) - def find(fn: (Key, Value) ⇒ Boolean) = cache.find {case (key, value) ⇒ fn(key, value)} - def mapToSet[R](fn: (Key, Value) ⇒ R) = cache.map {case (key, value) ⇒ 
fn(key, value)}.toSet - def mapValues[R](fn: Value ⇒ R) = cache.mapValues(fn).toMap - def keyExists(fn: Key ⇒ Boolean) = cache.keySet.exists(fn) - def forEachValue(fn: Value ⇒ Unit) = cache.values.foreach(fn) - def removeKeys(fn: Key ⇒ Boolean) = cache.keys.toVector.foreach(key ⇒ if (fn(key)) cache.remove(key)) + def getOrElseUpdate(key: Key, fn: => Value) = cache.getOrElseUpdate(key, fn) + def find(fn: (Key, Value) => Boolean) = cache.find {case (key, value) => fn(key, value)} + def mapToSet[R](fn: (Key, Value) => R) = cache.map {case (key, value) => fn(key, value)}.toSet + def mapValues[R](fn: Value => R) = cache.mapValues(fn).toMap + def keyExists(fn: Key => Boolean) = cache.keySet.exists(fn) + def forEachValue(fn: Value => Unit) = cache.values.foreach(fn) + def removeKeys(fn: Key => Boolean) = cache.keys.toVector.foreach(key => if (fn(key)) cache.remove(key)) def canEqual(other: Any): Boolean = other.isInstanceOf[TrieMapCache[_, _]] override def equals(other: Any): Boolean = other match { - case that: TrieMapCache[_, _] ⇒ (that canEqual this) && cache == that.cache - case _ ⇒ false + case that: TrieMapCache[_, _] => (that canEqual this) && cache == that.cache + case _ => false } override def hashCode(): Int = diff --git a/src/main/scala/sangria/validation/DocumentAnalyzer.scala b/src/main/scala/sangria/validation/DocumentAnalyzer.scala index ebf5be47..f3db9c45 100644 --- a/src/main/scala/sangria/validation/DocumentAnalyzer.scala +++ b/src/main/scala/sangria/validation/DocumentAnalyzer.scala @@ -25,9 +25,9 @@ case class DocumentAnalyzer(document: ast.Document) { val set = setsToVisit.pop() set.foreach { - case fs: ast.FragmentSpread ⇒ + case fs: ast.FragmentSpread => spreads += fs - case cont: ast.SelectionContainer ⇒ + case cont: ast.SelectionContainer => setsToVisit push cont.selections } } @@ -47,17 +47,17 @@ case class DocumentAnalyzer(document: ast.Document) { val node = nodesToVisit.pop() val spreads = getFragmentSpreads(node) - spreads.foreach { spread ⇒ 
+ spreads.foreach { spread => val fragName = spread.name if (!collectedNames.contains(fragName)) { collectedNames += fragName document.fragments.get(fragName) match { - case Some(frag) ⇒ + case Some(frag) => frags += frag nodesToVisit.push(frag) - case None ⇒ // do nothing + case None => // do nothing } } } @@ -68,13 +68,13 @@ case class DocumentAnalyzer(document: ast.Document) { lazy val separateOperations: Map[Option[String], ast.Document] = document.operations.map { - case (name, definition) ⇒ name → separateOperation(definition) + case (name, definition) => name -> separateOperation(definition) } def separateOperation(definition: OperationDefinition): ast.Document = { val definitions = (definition +: getRecursivelyReferencedFragments(definition)).sortBy(_.location match { - case Some(pos) ⇒ pos.line - case _ ⇒ 0 + case Some(pos) => pos.line + case _ => 0 }) document.copy(definitions = definitions) @@ -85,4 +85,4 @@ case class DocumentAnalyzer(document: ast.Document) { Some(separateOperation(document.operations.head._2)) else document.operations.get(operationName).map(separateOperation) -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/QueryValidator.scala b/src/main/scala/sangria/validation/QueryValidator.scala index cf3997d0..2c10b2fd 100644 --- a/src/main/scala/sangria/validation/QueryValidator.scala +++ b/src/main/scala/sangria/validation/QueryValidator.scala @@ -66,8 +66,8 @@ class RuleBasedQueryValidator(rules: List[ValidationRule]) extends QueryValidato def validateInputDocument(schema: Schema[_, _], doc: ast.InputDocument, inputTypeName: String): Vector[Violation] = schema.getInputType(ast.NamedType(inputTypeName)) match { - case Some(it) ⇒ validateInputDocument(schema, doc, it) - case None ⇒ throw new IllegalStateException(s"Can't find input type '$inputTypeName' in the schema. 
Known input types are: ${schema.inputTypes.keys.toVector.sorted mkString ", "}.") + case Some(it) => validateInputDocument(schema, doc, it) + case None => throw new IllegalStateException(s"Can't find input type '$inputTypeName' in the schema. Known input types are: ${schema.inputTypes.keys.toVector.sorted mkString ", "}.") } def validateInputDocument(schema: Schema[_, _], doc: ast.InputDocument, inputType: InputType[_]): Vector[Violation] = { @@ -82,10 +82,10 @@ class RuleBasedQueryValidator(rules: List[ValidationRule]) extends QueryValidato def validateUsingRules(queryAst: ast.AstNode, ctx: ValidationContext, visitors: List[ValidationRule#AstValidatingVisitor], topLevel: Boolean): Unit = AstVisitor.visitAstRecursive( doc = queryAst, - onEnter = node ⇒ { + onEnter = node => { ctx.typeInfo.enter(node) - visitors foreach { visitor ⇒ + visitors foreach { visitor => if (ctx.validVisitor(visitor) && visitor.onEnter.isDefinedAt(node)) { handleResult(ctx, node, visitor, visitor.onEnter(node)) } @@ -93,8 +93,8 @@ class RuleBasedQueryValidator(rules: List[ValidationRule]) extends QueryValidato Continue }, - onLeave = node ⇒ { - visitors foreach { visitor ⇒ + onLeave = node => { + visitors foreach { visitor => if (visitor.onLeave.isDefinedAt(node) && ctx.validVisitor(visitor)) { handleResult(ctx, node, visitor, visitor.onLeave(node)) } @@ -110,18 +110,18 @@ class RuleBasedQueryValidator(rules: List[ValidationRule]) extends QueryValidato def handleResult(ctx: ValidationContext, node: ast.AstNode, visitor: ValidationRule#AstValidatingVisitor, visitRes: Either[Vector[Violation], AstVisitorCommand.Value]) = visitRes match { - case Left(violation) ⇒ + case Left(violation) => ctx.addViolations(violation) - case AstVisitorCommand.RightSkip ⇒ + case AstVisitorCommand.RightSkip => ctx.skips(visitor) = node - case Right(Break) ⇒ + case Right(Break) => ctx.ignoredVisitors += visitor - case _ ⇒ // do nothing + case _ => // do nothing } def withoutValidation[T : ClassTag] = { val cls = 
classTag[T].runtimeClass - val newRules = rules.filterNot(r ⇒ cls.isAssignableFrom(r.getClass)) + val newRules = rules.filterNot(r => cls.isAssignableFrom(r.getClass)) new RuleBasedQueryValidator(newRules) } @@ -149,56 +149,56 @@ class ValidationContext(val schema: Schema[_, _], val doc: ast.Document, val sou object ValidationContext { @deprecated("The validations are now implemented as a part of `ValuesOfCorrectType` validation.", "1.4.0") def isValidLiteralValue(tpe: InputType[_], value: ast.Value, sourceMapper: Option[SourceMapper]): Vector[Violation] = (tpe, value) match { - case (_, _: ast.VariableValue) ⇒ Vector.empty - case (OptionInputType(ofType), _: ast.NullValue) ⇒ Vector.empty - case (OptionInputType(ofType), v) ⇒ + case (_, _: ast.VariableValue) => Vector.empty + case (OptionInputType(ofType), _: ast.NullValue) => Vector.empty + case (OptionInputType(ofType), v) => isValidLiteralValue(ofType, v, sourceMapper) - case (ListInputType(ofType), ast.ListValue(values, _, pos)) ⇒ + case (ListInputType(ofType), ast.ListValue(values, _, pos)) => values.zipWithIndex.flatMap { - case (elem, idx) ⇒ isValidLiteralValue(ofType, elem, sourceMapper) map (ListValueViolation(idx, _, sourceMapper, pos.toList)) + case (elem, idx) => isValidLiteralValue(ofType, elem, sourceMapper) map (ListValueViolation(idx, _, sourceMapper, pos.toList)) } - case (ListInputType(ofType), v) ⇒ + case (ListInputType(ofType), v) => isValidLiteralValue(ofType, v, sourceMapper) map (ListValueViolation(0, _, sourceMapper, v.location.toList)) - case (io: InputObjectType[_], ast.ObjectValue(fields, _, pos)) ⇒ + case (io: InputObjectType[_], ast.ObjectValue(fields, _, pos)) => val unknownFields = fields.collect { - case f if !io.fieldsByName.contains(f.name) ⇒ + case f if !io.fieldsByName.contains(f.name) => UnknownInputObjectFieldViolation(SchemaRenderer.renderTypeName(io, true), f.name, sourceMapper, f.location.toList) } val fieldViolations = - io.fields.toVector.flatMap { field ⇒ + 
io.fields.toVector.flatMap { field => val astField = fields.find(_.name == field.name) (astField, field.fieldType) match { - case (None, _: OptionInputType[_]) ⇒ + case (None, _: OptionInputType[_]) => Vector.empty - case (None, t) ⇒ + case (None, t) => Vector(NotNullInputObjectFieldMissingViolation(io.name, field.name, SchemaRenderer.renderTypeName(t), sourceMapper, pos.toList)) - case (Some(af), _) ⇒ + case (Some(af), _) => isValidLiteralValue(field.fieldType, af.value, sourceMapper) map (MapValueViolation(field.name, _, sourceMapper, af.location.toList)) } } unknownFields ++ fieldViolations - case (io: InputObjectType[_], v) ⇒ + case (io: InputObjectType[_], v) => Vector(InputObjectIsOfWrongTypeMissingViolation(SchemaRenderer.renderTypeName(io, true), sourceMapper, v.location.toList)) - case (s: ScalarType[_], v) ⇒ + case (s: ScalarType[_], v) => s.coerceInput(v) match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case _ => Vector.empty } - case (s: ScalarAlias[_, _], v) ⇒ + case (s: ScalarAlias[_, _], v) => s.aliasFor.coerceInput(v) match { - case Left(violation) ⇒ Vector(violation) - case Right(v) ⇒ s.fromScalar(v) match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case Right(v) => s.fromScalar(v) match { + case Left(violation) => Vector(violation) + case _ => Vector.empty } } - case (enum: EnumType[_], v) ⇒ + case (enum: EnumType[_], v) => enum.coerceInput(v) match { - case Left(violation) ⇒ Vector(violation) - case _ ⇒ Vector.empty + case Left(violation) => Vector(violation) + case _ => Vector.empty } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/SchemaBasedDocumentAnalyzer.scala b/src/main/scala/sangria/validation/SchemaBasedDocumentAnalyzer.scala index 0c5dd52d..ef8e40c3 100644 --- a/src/main/scala/sangria/validation/SchemaBasedDocumentAnalyzer.scala +++ 
b/src/main/scala/sangria/validation/SchemaBasedDocumentAnalyzer.scala @@ -21,10 +21,10 @@ case class SchemaBasedDocumentAnalyzer(schema: Schema[_, _], document: ast.Docum def getVariableUsages(astNode: ast.SelectionContainer): List[VariableUsage] = variableUsages.getOrElseUpdate(astNode.cacheKeyHash, { - AstVisitor.visitAstWithState(schema, astNode, ListBuffer[VariableUsage]()) { (typeInfo, usages) ⇒ + AstVisitor.visitAstWithState(schema, astNode, ListBuffer[VariableUsage]()) { (typeInfo, usages) => AstVisitor { - case _: ast.VariableDefinition ⇒ Skip - case vv: ast.VariableValue ⇒ + case _: ast.VariableDefinition => Skip + case vv: ast.VariableValue => usages += VariableUsage(vv, typeInfo.inputType, typeInfo.defaultValue) Continue } @@ -34,13 +34,13 @@ case class SchemaBasedDocumentAnalyzer(schema: Schema[_, _], document: ast.Docum def getRecursiveVariableUsages(operation: ast.OperationDefinition): List[VariableUsage] = recursiveVariableUsages.getOrElseUpdate(operation.cacheKeyHash, getRecursivelyReferencedFragments(operation).foldLeft(getVariableUsages(operation)) { - case (acc, fragment) ⇒ acc ++ getVariableUsages(fragment) + case (acc, fragment) => acc ++ getVariableUsages(fragment) }) lazy val deprecatedUsages: Vector[DeprecatedUsage] = - AstVisitor.visitAstWithState(schema, document, MutableMap[String, DeprecatedUsage]()) { (typeInfo, deprecated) ⇒ + AstVisitor.visitAstWithState(schema, document, MutableMap[String, DeprecatedUsage]()) { (typeInfo, deprecated) => AstVisitor.simple { - case astField: ast.Field if typeInfo.fieldDef.isDefined && typeInfo.fieldDef.get.deprecationReason.isDefined && typeInfo.previousParentType.isDefined ⇒ + case astField: ast.Field if typeInfo.fieldDef.isDefined && typeInfo.fieldDef.get.deprecationReason.isDefined && typeInfo.previousParentType.isDefined => val parent = typeInfo.previousParentType.get val field = typeInfo.fieldDef.get @@ -49,25 +49,25 @@ case class SchemaBasedDocumentAnalyzer(schema: Schema[_, _], document: 
ast.Docum if (!deprecated.contains(key)) deprecated(key) = DeprecatedField(parent, field, astField, typeInfo.fieldDef.get.deprecationReason.get) - case enumValue: ast.EnumValue ⇒ + case enumValue: ast.EnumValue => typeInfo.inputType.map(_.namedType) match { - case Some(parent: EnumType[_]) if typeInfo.enumValue.isDefined ⇒ + case Some(parent: EnumType[_]) if typeInfo.enumValue.isDefined => val value = typeInfo.enumValue.get val key = parent.name + "." + value.name if (value.deprecationReason.isDefined && !deprecated.contains(key)) deprecated(key) = DeprecatedEnumValue(parent, value, enumValue, value.deprecationReason.get) - case _ ⇒ // do nothing + case _ => // do nothing } } }.values.toVector lazy val introspectionUsages: Vector[IntrospectionUsage] = - AstVisitor.visitAstWithState(schema, document, MutableMap[String, IntrospectionUsage]()) { (typeInfo, usages) ⇒ + AstVisitor.visitAstWithState(schema, document, MutableMap[String, IntrospectionUsage]()) { (typeInfo, usages) => AstVisitor.simple { - case astField: ast.Field if typeInfo.fieldDef.isDefined && typeInfo.previousParentType.isDefined ⇒ + case astField: ast.Field if typeInfo.fieldDef.isDefined && typeInfo.previousParentType.isDefined => val parent = typeInfo.previousParentType.get val field = typeInfo.fieldDef.get diff --git a/src/main/scala/sangria/validation/TypeComparators.scala b/src/main/scala/sangria/validation/TypeComparators.scala index ca8264b4..d1b0f64a 100644 --- a/src/main/scala/sangria/validation/TypeComparators.scala +++ b/src/main/scala/sangria/validation/TypeComparators.scala @@ -5,24 +5,24 @@ import sangria.schema._ object TypeComparators { def isEqualType(type1: Type, type2: Type): Boolean = (type1, type2) match { - case (OptionType(t1), OptionType(t2)) ⇒ isEqualType(t1, t2) - case (OptionInputType(t1), OptionInputType(t2)) ⇒ isEqualType(t1, t2) - case (ListType(t1), ListType(t2)) ⇒ isEqualType(t1, t2) - case (ListInputType(t1), ListInputType(t2)) ⇒ isEqualType(t1, t2) - case (t1: Named, 
t2: Named) ⇒ t1.name == t2.name - case _ ⇒ false + case (OptionType(t1), OptionType(t2)) => isEqualType(t1, t2) + case (OptionInputType(t1), OptionInputType(t2)) => isEqualType(t1, t2) + case (ListType(t1), ListType(t2)) => isEqualType(t1, t2) + case (ListInputType(t1), ListInputType(t2)) => isEqualType(t1, t2) + case (t1: Named, t2: Named) => t1.name == t2.name + case _ => false } def isSubType(schema: Schema[_, _], subType: Type, superType: Type): Boolean = (subType, superType) match { - case (OptionType(ofType1), OptionType(ofType2)) ⇒ isSubType(schema, ofType1, ofType2) - case (OptionInputType(ofType1), OptionInputType(ofType2)) ⇒ isSubType(schema, ofType1, ofType2) - case (sub, OptionType(ofType2)) ⇒ isSubType(schema, sub, ofType2) - case (sub, OptionInputType(ofType2)) ⇒ isSubType(schema, sub, ofType2) - case (ListType(ofType1), ListType(ofType2)) ⇒ isSubType(schema, ofType1, ofType2) - case (ListInputType(ofType1), ListInputType(ofType2)) ⇒ isSubType(schema, ofType1, ofType2) - case (t1: ObjectType[_, _], t2: AbstractType) ⇒ schema.isPossibleType(t2.name, t1) - case (t1: Named, t2: Named) ⇒ t1.name == t2.name - case _ ⇒ false + case (OptionType(ofType1), OptionType(ofType2)) => isSubType(schema, ofType1, ofType2) + case (OptionInputType(ofType1), OptionInputType(ofType2)) => isSubType(schema, ofType1, ofType2) + case (sub, OptionType(ofType2)) => isSubType(schema, sub, ofType2) + case (sub, OptionInputType(ofType2)) => isSubType(schema, sub, ofType2) + case (ListType(ofType1), ListType(ofType2)) => isSubType(schema, ofType1, ofType2) + case (ListInputType(ofType1), ListInputType(ofType2)) => isSubType(schema, ofType1, ofType2) + case (t1: ObjectType[_, _], t2: AbstractType) => schema.isPossibleType(t2.name, t1) + case (t1: Named, t2: Named) => t1.name == t2.name + case _ => false } } diff --git a/src/main/scala/sangria/validation/TypeInfo.scala b/src/main/scala/sangria/validation/TypeInfo.scala index 21e6f297..c9e10953 100644 --- 
a/src/main/scala/sangria/validation/TypeInfo.scala +++ b/src/main/scala/sangria/validation/TypeInfo.scala @@ -34,18 +34,18 @@ class TypeInfo(schema: Schema[_, _], initialType: Option[Type] = None) { def forcePushType(tpe: Type): Unit = { tpe match { - case t: InputType[_] ⇒ inputTypeStack.push(Some(t)) - case _ ⇒ // do nothing + case t: InputType[_] => inputTypeStack.push(Some(t)) + case _ => // do nothing } tpe match { - case t: CompositeType[_] ⇒ parentTypeStack.push(Some(t)) - case _ ⇒ // do nothing + case t: CompositeType[_] => parentTypeStack.push(Some(t)) + case _ => // do nothing } tpe match { - case t: OutputType[_] ⇒ typeStack.push(Some(t)) - case _ ⇒ // do nothing + case t: OutputType[_] => typeStack.push(Some(t)) + case _ => // do nothing } } @@ -59,9 +59,9 @@ class TypeInfo(schema: Schema[_, _], initialType: Option[Type] = None) { ancestorStack push node node match { - case document: ast.Document ⇒ + case document: ast.Document => documentStack push document - case f: ast.Field ⇒ + case f: ast.Field => val parent = parentType val fieldDef = parent flatMap (getFieldDef(_, f)) val fieldType = fieldDef map (_.fieldType) @@ -70,128 +70,128 @@ class TypeInfo(schema: Schema[_, _], initialType: Option[Type] = None) { typeStack push fieldType pushParent() - case ast.Directive(name, _, _, _) ⇒ + case ast.Directive(name, _, _, _) => directive = schema.directivesByName get name - case ast.OperationDefinition(ast.OperationType.Query, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Query, _, _, _, _, _, _, _) => typeStack push Some(schema.query) pushParent() - case ast.OperationDefinition(ast.OperationType.Mutation, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Mutation, _, _, _, _, _, _, _) => typeStack push schema.mutation pushParent() - case ast.OperationDefinition(ast.OperationType.Subscription, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Subscription, _, _, _, _, _, _, _) => typeStack 
push schema.subscription pushParent() - case fs: ast.FragmentSpread ⇒ + case fs: ast.FragmentSpread => val fragment = document.flatMap(_.fragments.get(fs.name)) - typeStack.push(fragment.flatMap(fd ⇒ schema.allTypes get fd.typeCondition.name)) + typeStack.push(fragment.flatMap(fd => schema.allTypes get fd.typeCondition.name)) pushParent() - case fd: ast.FragmentDefinition ⇒ + case fd: ast.FragmentDefinition => typeStack.push(schema.allTypes get fd.typeCondition.name) pushParent() - case ifd: ast.InlineFragment ⇒ + case ifd: ast.InlineFragment => typeStack.push(ifd.typeCondition.fold(tpe)(schema.allTypes get _.name)) pushParent() - case vd: ast.VariableDefinition ⇒ + case vd: ast.VariableDefinition => inputTypeStack push schema.getInputType(vd.tpe) - case a: ast.Argument ⇒ - argument = directive orElse fieldDef flatMap { withArgs ⇒ + case a: ast.Argument => + argument = directive orElse fieldDef flatMap { withArgs => withArgs.arguments find (_.name == a.name) } defaultValueStack push argument.flatMap(_.defaultValue) inputTypeStack push argument.map(_.inputValueType) - case ast.ListValue(values, _, _) ⇒ + case ast.ListValue(values, _, _) => // List positions never have a default value. 
defaultValueStack push None inputType match { - case Some(it) ⇒ it.nonOptionalType match { - case it: ListInputType[_] ⇒ inputTypeStack push Some(it.ofType) - case _ ⇒ inputTypeStack push None + case Some(it) => it.nonOptionalType match { + case it: ListInputType[_] => inputTypeStack push Some(it.ofType) + case _ => inputTypeStack push None } - case None ⇒ inputTypeStack push None + case None => inputTypeStack push None } - case ast.ObjectField(name, value, _, _) ⇒ + case ast.ObjectField(name, value, _, _) => val (fieldType, defaultValue) = inputType match { - case Some(it) if it.namedType.isInstanceOf[InputObjectType[_]] ⇒ + case Some(it) if it.namedType.isInstanceOf[InputObjectType[_]] => it.namedType match { - case obj: InputObjectType[_] ⇒ + case obj: InputObjectType[_] => val field = obj.fieldsByName.get(name) - field.map(_.inputValueType) → field.flatMap(_.defaultValue) - case _ ⇒ None → None + field.map(_.inputValueType) -> field.flatMap(_.defaultValue) + case _ => None -> None } - case _ ⇒ None → None + case _ => None -> None } defaultValueStack push defaultValue inputTypeStack push fieldType - case ast.EnumValue(name, _, _) ⇒ + case ast.EnumValue(name, _, _) => enumValue = inputType match { - case Some(it) ⇒ it.namedType match { - case enum: EnumType[_] ⇒ enum.byName.get(name) - case _ ⇒ None + case Some(it) => it.namedType match { + case enum: EnumType[_] => enum.byName.get(name) + case _ => None } - case None ⇒ None + case None => None } - case _ ⇒ // ignore + case _ => // ignore } } def pushParent(): Unit = { tpe match { - case Some(some) ⇒ some.namedType match { - case comp: CompositeType[_] ⇒ parentTypeStack push Some(comp) - case _ ⇒ parentTypeStack push None + case Some(some) => some.namedType match { + case comp: CompositeType[_] => parentTypeStack push Some(comp) + case _ => parentTypeStack push None } - case _ ⇒ parentTypeStack push None + case _ => parentTypeStack push None } } def leave(node: ast.AstNode) = { node match { - case document: 
ast.Document ⇒ + case document: ast.Document => documentStack.pop() - case f: ast.Field ⇒ + case f: ast.Field => fieldDefStack.pop() typeStack.pop() parentTypeStack.pop() - case ast.Directive(name, _, _, _) ⇒ + case ast.Directive(name, _, _, _) => directive = None - case ast.OperationDefinition(ast.OperationType.Query, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Query, _, _, _, _, _, _, _) => typeStack.pop() parentTypeStack.pop() - case ast.OperationDefinition(ast.OperationType.Mutation, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Mutation, _, _, _, _, _, _, _) => typeStack.pop() parentTypeStack.pop() - case ast.OperationDefinition(ast.OperationType.Subscription, _, _, _, _, _, _, _) ⇒ + case ast.OperationDefinition(ast.OperationType.Subscription, _, _, _, _, _, _, _) => typeStack.pop() parentTypeStack.pop() - case fs: ast.FragmentSpread ⇒ + case fs: ast.FragmentSpread => typeStack.pop() parentTypeStack.pop() - case fd: ast.FragmentDefinition ⇒ + case fd: ast.FragmentDefinition => typeStack.pop() parentTypeStack.pop() - case fd: ast.InlineFragment ⇒ + case fd: ast.InlineFragment => typeStack.pop() parentTypeStack.pop() - case vd: ast.VariableDefinition ⇒ + case vd: ast.VariableDefinition => inputTypeStack.pop() - case a: ast.Argument ⇒ + case a: ast.Argument => argument = None defaultValueStack.pop() inputTypeStack.pop() - case ast.ListValue(_, _, _) ⇒ + case ast.ListValue(_, _, _) => defaultValueStack.pop() inputTypeStack.pop() - case ast.ObjectField(_, _, _, _) ⇒ + case ast.ObjectField(_, _, _, _) => defaultValueStack.pop() inputTypeStack.pop() - case ast.EnumValue(_, _, _) ⇒ + case ast.EnumValue(_, _, _) => enumValue = None - case _ ⇒ // ignore + case _ => // ignore } ancestorStack.pop() @@ -205,8 +205,8 @@ class TypeInfo(schema: Schema[_, _], initialType: Option[Type] = None) { else if (astField.name == TypeNameMetaField.name) Some(TypeNameMetaField) else parent match { - case o: ObjectLikeType[_, _] ⇒ 
o.getField(schema, astField.name).headOption - case _ ⇒ None + case o: ObjectLikeType[_, _] => o.getField(schema, astField.name).headOption + case _ => None } } } diff --git a/src/main/scala/sangria/validation/Violation.scala b/src/main/scala/sangria/validation/Violation.scala index 9d3294ee..571780fe 100644 --- a/src/main/scala/sangria/validation/Violation.scala +++ b/src/main/scala/sangria/validation/Violation.scala @@ -32,8 +32,8 @@ trait AstNodeLocation { def simpleErrorMessage: String lazy val astLocation = (for { - sm ← sourceMapper - } yield locations map (p ⇒ s" ${sm.renderLocation(p)}:\n${sm.renderLinePosition(p)}") mkString "\n") getOrElse "" + sm <- sourceMapper + } yield locations map (p => s" ${sm.renderLocation(p)}:\n${sm.renderLinePosition(p)}") mkString "\n") getOrElse "" final def errorMessage = simpleErrorMessage + astLocation } @@ -61,18 +61,18 @@ case object EnumCoercionViolation extends ValueCoercionViolation(s"Enum value ex case class FieldCoercionViolation(fieldPath: List[String], valueViolation: Violation, ownSourceMapper: Option[SourceMapper], ownLocations: List[AstLocation], errorPrefix: String, isArgument: Boolean) extends AstNodeViolation { lazy val sourceMapper = valueViolation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ ownSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => ownSourceMapper } lazy val locations = valueViolation match { - case astv: AstNodeViolation ⇒ (ownLocations ++ astv.locations).distinct - case _ ⇒ ownLocations + case astv: AstNodeViolation => (ownLocations ++ astv.locations).distinct + case _ => ownLocations } lazy val violationMessage = valueViolation match { - case astv: AstNodeViolation ⇒ astv.simpleErrorMessage - case v ⇒ v.errorMessage + case astv: AstNodeViolation => astv.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"${errorPrefix}${if (isArgument) "Argument" else "Field"} '${fieldPath mkString "."}' has wrong value: 
$violationMessage." @@ -80,18 +80,18 @@ case class FieldCoercionViolation(fieldPath: List[String], valueViolation: Viola case class VarTypeMismatchViolation(definitionName: String, expectedType: String, input: Option[String], violation: Violation, ownSourceMapper: Option[SourceMapper], ownLocations: List[AstLocation]) extends AstNodeViolation { lazy val sourceMapper = violation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ ownSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => ownSourceMapper } lazy val locations = violation match { - case astv: AstNodeViolation ⇒ (ownLocations ++ astv.locations).distinct - case _ ⇒ ownLocations + case astv: AstNodeViolation => (ownLocations ++ astv.locations).distinct + case _ => ownLocations } lazy val violationMessage = violation match { - case astv: AstNodeViolation ⇒ astv.simpleErrorMessage - case v ⇒ v.errorMessage + case astv: AstNodeViolation => astv.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"Variable '$$$definitionName' expected value of type '$expectedType' but ${input map ("got: " + _) getOrElse "value is undefined"}. 
Reason: $violationMessage" @@ -113,18 +113,18 @@ case class InputObjectTypeMismatchViolation(fieldPath: List[String], typeName: S case class BadValueViolation(typeName: String, value: String, violation: Option[Violation], ownSourceMapper: Option[SourceMapper], ownLocations: List[AstLocation]) extends AstNodeViolation { lazy val sourceMapper = violation match { - case Some(astv: AstNodeViolation) ⇒ astv.sourceMapper - case _ ⇒ ownSourceMapper + case Some(astv: AstNodeViolation) => astv.sourceMapper + case _ => ownSourceMapper } lazy val locations = violation match { - case Some(astv: AstNodeViolation) ⇒ (ownLocations ++ astv.locations).distinct - case _ ⇒ ownLocations + case Some(astv: AstNodeViolation) => (ownLocations ++ astv.locations).distinct + case _ => ownLocations } lazy val violationMessage = violation map { - case astv: AstNodeViolation ⇒ astv.simpleErrorMessage - case v ⇒ v.errorMessage + case astv: AstNodeViolation => astv.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"Expected type '$typeName', found '$value'.${violationMessage.fold("")(" " + _)}" @@ -132,18 +132,18 @@ case class BadValueViolation(typeName: String, value: String, violation: Option[ case class InvalidInputDocumentViolation(typeName: String, value: String, violation: Violation, ownSourceMapper: Option[SourceMapper], ownLocations: List[AstLocation]) extends AstNodeViolation { lazy val sourceMapper = violation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ ownSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => ownSourceMapper } lazy val locations = violation match { - case astv: AstNodeViolation ⇒ (ownLocations ++ astv.locations).distinct - case _ ⇒ ownLocations + case astv: AstNodeViolation => (ownLocations ++ astv.locations).distinct + case _ => ownLocations } lazy val violationMessage = violation match { - case astv: AstNodeViolation ⇒ astv.simpleErrorMessage - case v ⇒ v.errorMessage + case astv: 
AstNodeViolation => astv.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"At path $violationMessage" @@ -151,18 +151,18 @@ case class InvalidInputDocumentViolation(typeName: String, value: String, violat case class BadValueForDefaultArgViolation(varName: String, typeName: String, value: String, violation: Violation, ownSourceMapper: Option[SourceMapper], ownLocations: List[AstLocation]) extends AstNodeViolation { lazy val sourceMapper = violation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ ownSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => ownSourceMapper } lazy val locations = violation match { - case astv: AstNodeViolation ⇒ (ownLocations ++ astv.locations).distinct - case _ ⇒ ownLocations + case astv: AstNodeViolation => (ownLocations ++ astv.locations).distinct + case _ => ownLocations } lazy val violationMessage = violation match { - case astv: AstNodeViolation ⇒ astv.simpleErrorMessage - case v ⇒ v.errorMessage + case astv: AstNodeViolation => astv.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"Variable '$$$varName' of type '$typeName' has invalid default value: $value. Reason: $violationMessage" @@ -181,7 +181,7 @@ case class UndefinedFieldViolation( locations: List[AstLocation] ) extends AstNodeViolation with SpecViolation { val code = "undefinedField" - val args = Map("fieldName" → fieldName, "type" → typeName) + val args = Map("fieldName" -> fieldName, "type" -> typeName) lazy val simpleErrorMessage = { val message = s"Cannot query field '$fieldName' on type '$typeName'." 
@@ -195,42 +195,42 @@ case class UndefinedFieldViolation( case class InlineFragmentOnNonCompositeErrorViolation(typeName: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "inlineFragmentOnNonCompositeType" - val args = Map("type" → typeName) + val args = Map("type" -> typeName) lazy val simpleErrorMessage = s"Fragment cannot condition on non composite type '$typeName'." } case class FragmentOnNonCompositeErrorViolation(fragName: String, typeName: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "fragmentOnNonCompositeType" - val args = Map("fragmentName" → fragName, "type" → typeName) + val args = Map("fragmentName" -> fragName, "type" -> typeName) lazy val simpleErrorMessage = s"Fragment '$fragName' cannot condition on non composite type '$typeName'." } case class UnknownArgViolation(argName: String, fieldName: String, typeName: String, suggestedArgs: Seq[String], sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "unknownArgument" - val args = Map("argumentName" → argName, "fieldName" → fieldName, "typeName" → typeName) + val args = Map("argumentName" -> argName, "fieldName" -> fieldName, "typeName" -> typeName) lazy val simpleErrorMessage = s"Unknown argument '$argName' on field '$fieldName' of type '$typeName'.${Violation.didYouMean(suggestedArgs)}" } case class UnknownDirectiveArgViolation(argName: String, dirName: String, suggestedArgs: Seq[String], sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "unknownDirectiveArgument" - val args = Map("argumentName" → argName, "directiveName" → dirName) + val args = Map("argumentName" -> argName, "directiveName" -> dirName) lazy val simpleErrorMessage = s"Unknown argument '$argName' on directive 
'$dirName'.${Violation.didYouMean(suggestedArgs)}" } case class UnknownDirectiveViolation(name: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "unknownDirective" - val args = Map("directiveName" → name) + val args = Map("directiveName" -> name) lazy val simpleErrorMessage = s"Unknown directive '$name'." } case class MisplacedDirectiveViolation(name: String, correctPlacement: Option[(DirectiveLocation.Value, String)], sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "misplacedDirective" - val args = Map("directiveName" → name, "location" → correctPlacement.map(loc ⇒ DirectiveLocation.toSpecString(loc._1)).getOrElse("here")) + val args = Map("directiveName" -> name, "location" -> correctPlacement.map(loc => DirectiveLocation.toSpecString(loc._1)).getOrElse("here")) lazy val simpleErrorMessage = s"Directive '$name' may not be used ${correctPlacement.fold("here")("on " + _._2)}." } @@ -265,25 +265,25 @@ case class UnusedVariableViolation(name: String, operationName: Option[String], case class NoSubselectionAllowedViolation(fieldName: String, typeName: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "noSubselectionAllowed" - val args = Map("fieldName" → fieldName, "type" → typeName) + val args = Map("fieldName" -> fieldName, "type" -> typeName) lazy val simpleErrorMessage = s"Field '$fieldName' of type '$typeName' must not have a sub selection." } case class SubscriptionSingleFieldOnlyViolation(opName: Option[String], sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation { - lazy val simpleErrorMessage = s"${opName.fold("Anonymous Subscription")(n ⇒ s"Subscription '$n'")} must select only one top level field." 
+ lazy val simpleErrorMessage = s"${opName.fold("Anonymous Subscription")(n => s"Subscription '$n'")} must select only one top level field." } case class RequiredSubselectionViolation(fieldName: String, typeName: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "requiredSubselection" - val args = Map("fieldName" → fieldName, "type" → typeName) + val args = Map("fieldName" -> fieldName, "type" -> typeName) lazy val simpleErrorMessage = s"Field '$fieldName' of type '$typeName' must have a sub selection." } case class NonExecutableDefinitionViolation(definitionName: String, definition: Definition, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation with SpecViolation { val code = "nonExecutableDefinition" - val args = Map("defName" → definitionName) + val args = Map("defName" -> definitionName) lazy val simpleErrorMessage = s"The '$definitionName' definition is not executable." } @@ -316,8 +316,8 @@ case class FieldsConflictViolation(outputName: String, reason: Either[String, Ve lazy val simpleErrorMessage = s"Field '$outputName' conflict because ${reasonMessage(reason)}. Use different aliases on the fields to fetch both if this was intentional." 
private def reasonMessage(reason: Either[String, Vector[ConflictReason]]): String = reason match { - case Left(message) ⇒ message - case Right(subReasons) ⇒ subReasons map (sr ⇒ s"subfields '${sr.fieldName}' conflict because ${reasonMessage(sr.reason)}") mkString " and " + case Left(message) => message + case Right(subReasons) => subReasons map (sr => s"subfields '${sr.fieldName}' conflict because ${reasonMessage(sr.reason)}") mkString " and " } } @@ -380,24 +380,24 @@ trait PathBasedViolation { case class ListValueViolation(index: Int, violation: Violation, listSourceMapper: Option[SourceMapper], listPosition: List[AstLocation]) extends AstNodeViolation with PathBasedViolation { lazy val sourceMapper = violation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ listSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => listSourceMapper } lazy val locations = violation match { - case astv: AstNodeViolation ⇒ listPosition ++ astv.locations - case _ ⇒ listPosition + case astv: AstNodeViolation => listPosition ++ astv.locations + case _ => listPosition } lazy val pathString = violation match { - case pbv: PathBasedViolation ⇒ s"[$index]" + pbv.pathString - case _ ⇒ s"[$index]" + case pbv: PathBasedViolation => s"[$index]" + pbv.pathString + case _ => s"[$index]" } lazy val errorMessageWithoutPath = violation match { - case pbv: PathBasedViolation ⇒ pbv.errorMessageWithoutPath - case v: AstNodeLocation ⇒ v.simpleErrorMessage - case v ⇒ v.errorMessage + case pbv: PathBasedViolation => pbv.errorMessageWithoutPath + case v: AstNodeLocation => v.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"'$pathString' $errorMessageWithoutPath" @@ -405,24 +405,24 @@ case class ListValueViolation(index: Int, violation: Violation, listSourceMapper case class MapValueViolation(fieldName: String, violation: Violation, mapSourceMapper: Option[SourceMapper], mapPosition: List[AstLocation]) extends AstNodeViolation with 
PathBasedViolation { lazy val sourceMapper = violation match { - case astv: AstNodeViolation ⇒ astv.sourceMapper - case _ ⇒ mapSourceMapper + case astv: AstNodeViolation => astv.sourceMapper + case _ => mapSourceMapper } lazy val locations = violation match { - case astv: AstNodeViolation ⇒ mapPosition ++ astv.locations - case _ ⇒ mapPosition + case astv: AstNodeViolation => mapPosition ++ astv.locations + case _ => mapPosition } lazy val pathString = violation match { - case pbv: PathBasedViolation ⇒ "." + fieldName + pbv.pathString - case _ ⇒ "." + fieldName + case pbv: PathBasedViolation => "." + fieldName + pbv.pathString + case _ => "." + fieldName } lazy val errorMessageWithoutPath = violation match { - case pbv: PathBasedViolation ⇒ pbv.errorMessageWithoutPath - case v: AstNodeLocation ⇒ v.simpleErrorMessage - case v ⇒ v.errorMessage + case pbv: PathBasedViolation => pbv.errorMessageWithoutPath + case v: AstNodeLocation => v.simpleErrorMessage + case v => v.errorMessage } lazy val simpleErrorMessage = s"'${pathString substring 1}' $errorMessageWithoutPath" @@ -607,4 +607,4 @@ case class ExistingTypeViolation(typeName: String, sourceMapper: Option[SourceMa case class InvalidTypeUsageViolation(expectedTypeKind: String, tpe: String, sourceMapper: Option[SourceMapper], locations: List[AstLocation]) extends AstNodeViolation { lazy val simpleErrorMessage = s"Type '$tpe' is not an $expectedTypeKind type." 
-} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/ExecutableDefinitions.scala b/src/main/scala/sangria/validation/rules/ExecutableDefinitions.scala index 0bce663a..d96c1c69 100644 --- a/src/main/scala/sangria/validation/rules/ExecutableDefinitions.scala +++ b/src/main/scala/sangria/validation/rules/ExecutableDefinitions.scala @@ -13,10 +13,10 @@ import sangria.validation._ class ExecutableDefinitions extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.Document(definitions, _, _, _) ⇒ + case ast.Document(definitions, _, _, _) => val errors = definitions.collect { - case d if !d.isInstanceOf[ast.OperationDefinition] && !d.isInstanceOf[ast.FragmentDefinition] ⇒ + case d if !d.isInstanceOf[ast.OperationDefinition] && !d.isInstanceOf[ast.FragmentDefinition] => NonExecutableDefinitionViolation(definitionName(d), d, ctx.sourceMapper, d.location.toList) } @@ -26,12 +26,12 @@ class ExecutableDefinitions extends ValidationRule { } def definitionName(definition: ast.Definition): String = definition match { - case d: ast.FragmentDefinition ⇒ d.name - case d: ast.OperationDefinition ⇒ d.name getOrElse "unnamed operation" - case d: ast.TypeDefinition ⇒ d.name - case d: ast.DirectiveDefinition ⇒ d.name - case d: ast.SchemaDefinition ⇒ "schema" - case d: ast.TypeExtensionDefinition ⇒ d.name - case d: ast.SchemaExtensionDefinition ⇒ "schema" + case d: ast.FragmentDefinition => d.name + case d: ast.OperationDefinition => d.name getOrElse "unnamed operation" + case d: ast.TypeDefinition => d.name + case d: ast.DirectiveDefinition => d.name + case d: ast.SchemaDefinition => "schema" + case d: ast.TypeExtensionDefinition => d.name + case d: ast.SchemaExtensionDefinition => "schema" } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/FieldsOnCorrectType.scala 
b/src/main/scala/sangria/validation/rules/FieldsOnCorrectType.scala index aeb495e4..54625246 100644 --- a/src/main/scala/sangria/validation/rules/FieldsOnCorrectType.scala +++ b/src/main/scala/sangria/validation/rules/FieldsOnCorrectType.scala @@ -16,9 +16,9 @@ import sangria.validation._ class FieldsOnCorrectType extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.Field(_, name, _, _, _, _, _, pos) ⇒ + case ast.Field(_, name, _, _, _, _, _, pos) => (ctx.typeInfo.previousParentType, ctx.typeInfo.fieldDef) match { - case (Some(parent), None) ⇒ + case (Some(parent), None) => val suggestedTypeNames = collectSuggestedTypes(parent, name) val suggestedFieldNames = if (suggestedTypeNames.nonEmpty) Vector.empty @@ -31,15 +31,15 @@ class FieldsOnCorrectType extends ValidationRule { suggestedFieldNames, ctx.sourceMapper, pos.toList))) - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } } def collectSuggestedFieldNames(schema: Schema[_, _], tpe: CompositeType[_], fieldName: String) = tpe match { - case obj: ObjectLikeType[_, _] ⇒ StringUtil.suggestionList(fieldName, obj.fields map (_.name)) - case _ ⇒ Vector.empty + case obj: ObjectLikeType[_, _] => StringUtil.suggestionList(fieldName, obj.fields map (_.name)) + case _ => Vector.empty } /** @@ -50,9 +50,9 @@ class FieldsOnCorrectType extends ValidationRule { */ private def collectSuggestedTypes(tpe: CompositeType[_], fieldName: String) = tpe match { - case a: AbstractType ⇒ + case a: AbstractType => siblingInterfacesIncludingField(a, fieldName) ++ implementationsIncludingField(a, fieldName) - case _ ⇒ Vector.empty + case _ => Vector.empty } /** @@ -64,10 +64,10 @@ class FieldsOnCorrectType extends ValidationRule { private def siblingInterfacesIncludingField(tpe: AbstractType, fieldName: String) = ctx.schema.possibleTypes(tpe.name) .foldLeft(Map.empty[String, Int]) { - case (oacc, obj) ⇒ obj.interfaces.foldLeft(oacc) { - 
case (iacc, i) if i.getField(ctx.schema, fieldName).isEmpty ⇒ iacc - case (iacc, i) if iacc contains i.name ⇒ iacc.updated(i.name, iacc(i.name) + 1) - case (iacc, i) ⇒ iacc + (i.name → 1) + case (oacc, obj) => obj.interfaces.foldLeft(oacc) { + case (iacc, i) if i.getField(ctx.schema, fieldName).isEmpty => iacc + case (iacc, i) if iacc contains i.name => iacc.updated(i.name, iacc(i.name) + 1) + case (iacc, i) => iacc + (i.name -> 1) } } .toVector diff --git a/src/main/scala/sangria/validation/rules/FragmentsOnCompositeTypes.scala b/src/main/scala/sangria/validation/rules/FragmentsOnCompositeTypes.scala index 0343be56..4d02f50d 100644 --- a/src/main/scala/sangria/validation/rules/FragmentsOnCompositeTypes.scala +++ b/src/main/scala/sangria/validation/rules/FragmentsOnCompositeTypes.scala @@ -15,18 +15,18 @@ import sangria.validation.{InlineFragmentOnNonCompositeErrorViolation, FragmentO class FragmentsOnCompositeTypes extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.InlineFragment(Some(cond), _, _, _, _, pos) ⇒ + case ast.InlineFragment(Some(cond), _, _, _, _, pos) => ctx.typeInfo.tpe match { - case Some(tpe) if !tpe.isInstanceOf[CompositeType[_]] ⇒ + case Some(tpe) if !tpe.isInstanceOf[CompositeType[_]] => Left(Vector(InlineFragmentOnNonCompositeErrorViolation(cond.name, ctx.sourceMapper, cond.location.toList))) - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } - case ast.FragmentDefinition(name, cond, _, _, _, _, _, pos) ⇒ + case ast.FragmentDefinition(name, cond, _, _, _, _, _, pos) => ctx.typeInfo.tpe match { - case Some(tpe) if !tpe.isInstanceOf[CompositeType[_]] ⇒ + case Some(tpe) if !tpe.isInstanceOf[CompositeType[_]] => Left(Vector(FragmentOnNonCompositeErrorViolation(name, cond.name, ctx.sourceMapper, cond.location.toList))) - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } } diff --git 
a/src/main/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInference.scala b/src/main/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInference.scala index abae1be1..73918924 100644 --- a/src/main/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInference.scala +++ b/src/main/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInference.scala @@ -17,26 +17,26 @@ class InputDocumentNonConflictingVariableInference extends ValidationRule { private val usedVariables = new mutable.HashMap[String, (ast.Type, List[AstLocation])] override val onEnter: ValidationVisit = { - case _: ast.InputDocument ⇒ + case _: ast.InputDocument => inInputDocument = true AstVisitorCommand.RightContinue - case v: ast.VariableValue if inInputDocument && ctx.typeInfo.inputType.isDefined ⇒ + case v: ast.VariableValue if inInputDocument && ctx.typeInfo.inputType.isDefined => val parentType = ctx.typeInfo.inputType.get val parentTypeAst = SchemaRenderer.renderTypeNameAst(parentType) usedVariables.get(v.name) match { - case Some((existing, otherPos)) if existing != parentTypeAst ⇒ + case Some((existing, otherPos)) if existing != parentTypeAst => Left(Vector(VariableInferenceViolation(v.name, existing.renderCompact, parentTypeAst.renderCompact, ctx.sourceMapper, v.location.toList ++ otherPos))) - case None ⇒ + case None => usedVariables(v.name) = (parentTypeAst, v.location.toList) AstVisitorCommand.RightContinue - case _ ⇒ AstVisitorCommand.RightContinue + case _ => AstVisitorCommand.RightContinue } } override def onLeave = { - case _: ast.InputDocument ⇒ + case _: ast.InputDocument => inInputDocument = false AstVisitorCommand.RightContinue } diff --git a/src/main/scala/sangria/validation/rules/KnownArgumentNames.scala b/src/main/scala/sangria/validation/rules/KnownArgumentNames.scala index 5e62f132..98e914c6 100644 --- a/src/main/scala/sangria/validation/rules/KnownArgumentNames.scala +++ 
b/src/main/scala/sangria/validation/rules/KnownArgumentNames.scala @@ -15,11 +15,11 @@ import sangria.validation._ class KnownArgumentNames extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.Argument(name, _, _, pos) ⇒ + case ast.Argument(name, _, _, pos) => ctx.typeInfo.ancestors.drop(1).head match { - case _: ast.Field ⇒ + case _: ast.Field => ctx.typeInfo.fieldDef match { - case Some(field) if !field.arguments.exists(_.name == name) ⇒ + case Some(field) if !field.arguments.exists(_.name == name) => Left(Vector(UnknownArgViolation( name, field.name, @@ -27,26 +27,26 @@ class KnownArgumentNames extends ValidationRule { StringUtil.suggestionList(name, field.arguments map (_.name)), ctx.sourceMapper, pos.toList))) - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } - case _: ast.Directive ⇒ + case _: ast.Directive => ctx.typeInfo.directive match { - case Some(dir) if !dir.arguments.exists(_.name == name) ⇒ + case Some(dir) if !dir.arguments.exists(_.name == name) => Left(Vector(UnknownDirectiveArgViolation( name, dir.name, StringUtil.suggestionList(name, dir.arguments map (_.name)), ctx.sourceMapper, pos.toList))) - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } - case _ ⇒ + case _ => AstVisitorCommand.RightContinue } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/KnownDirectives.scala b/src/main/scala/sangria/validation/rules/KnownDirectives.scala index f1a57f39..2f6e9cf6 100644 --- a/src/main/scala/sangria/validation/rules/KnownDirectives.scala +++ b/src/main/scala/sangria/validation/rules/KnownDirectives.scala @@ -17,17 +17,17 @@ import sangria.validation._ class KnownDirectives extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.Directive(name, _, _, pos) ⇒ + case ast.Directive(name, _, _, pos) => 
ctx.schema.directivesByName.get(name) match { - case None ⇒ + case None => Left(Vector(UnknownDirectiveViolation(name, ctx.sourceMapper, pos.toList))) - case Some(dir) ⇒ + case Some(dir) => getCorrectLocation(ctx.typeInfo.ancestors) match { - case None ⇒ + case None => Left(Vector(MisplacedDirectiveViolation(name, None, ctx.sourceMapper, pos.toList))) - case correct @ Some((correctLocation, _)) if !dir.locations.contains(correctLocation) ⇒ + case correct @ Some((correctLocation, _)) if !dir.locations.contains(correctLocation) => Left(Vector(MisplacedDirectiveViolation(name, correct, ctx.sourceMapper, pos.toList))) - case _ ⇒ AstVisitorCommand.RightContinue + case _ => AstVisitorCommand.RightContinue } } } @@ -38,42 +38,42 @@ class KnownDirectives extends ValidationRule { } object KnownDirectives { - def getLocation(node: AstNode, parent: ⇒ AstNode): Option[(DirectiveLocation.Value, String)] = node match { - case op: ast.OperationDefinition if op.operationType == OperationType.Query ⇒ - Some(DirectiveLocation.Query → "query operation") - case op: ast.OperationDefinition if op.operationType == OperationType.Mutation ⇒ - Some(DirectiveLocation.Mutation → "mutation operation") - case op: ast.OperationDefinition if op.operationType == OperationType.Subscription ⇒ - Some(DirectiveLocation.Subscription → "subscription operation") + def getLocation(node: AstNode, parent: => AstNode): Option[(DirectiveLocation.Value, String)] = node match { + case op: ast.OperationDefinition if op.operationType == OperationType.Query => + Some(DirectiveLocation.Query -> "query operation") + case op: ast.OperationDefinition if op.operationType == OperationType.Mutation => + Some(DirectiveLocation.Mutation -> "mutation operation") + case op: ast.OperationDefinition if op.operationType == OperationType.Subscription => + Some(DirectiveLocation.Subscription -> "subscription operation") - case _: ast.Field ⇒ Some(DirectiveLocation.Field → "field") - case _: ast.FragmentDefinition ⇒ 
Some(DirectiveLocation.FragmentDefinition → "fragment definition") - case _: ast.FragmentSpread ⇒ Some(DirectiveLocation.FragmentSpread → "fragment spread") - case _: ast.InlineFragment ⇒ Some(DirectiveLocation.InlineFragment → "inline fragment") - case _: ast.VariableDefinition ⇒ Some(DirectiveLocation.VariableDefinition → "variable definition") + case _: ast.Field => Some(DirectiveLocation.Field -> "field") + case _: ast.FragmentDefinition => Some(DirectiveLocation.FragmentDefinition -> "fragment definition") + case _: ast.FragmentSpread => Some(DirectiveLocation.FragmentSpread -> "fragment spread") + case _: ast.InlineFragment => Some(DirectiveLocation.InlineFragment -> "inline fragment") + case _: ast.VariableDefinition => Some(DirectiveLocation.VariableDefinition -> "variable definition") - case _: ast.SchemaDefinition ⇒ Some(DirectiveLocation.Schema → "schema definition") - case _: ast.SchemaExtensionDefinition ⇒ Some(DirectiveLocation.Schema → "schema extension definition") - case _: ast.ScalarTypeDefinition ⇒ Some(DirectiveLocation.Scalar → "scalar type definition") - case _: ast.ScalarTypeExtensionDefinition ⇒ Some(DirectiveLocation.Scalar → "scalar type extension definition") - case _: ast.ObjectTypeDefinition ⇒ Some(DirectiveLocation.Object → "object type definition") - case _: ast.ObjectTypeExtensionDefinition ⇒ Some(DirectiveLocation.Object → "object type extension definition") - case _: ast.FieldDefinition ⇒ Some(DirectiveLocation.FieldDefinition → "field definition") - case _: ast.InterfaceTypeDefinition ⇒ Some(DirectiveLocation.Interface → "interface definition") - case _: ast.InterfaceTypeExtensionDefinition ⇒ Some(DirectiveLocation.Interface → "interface extension definition") - case _: ast.UnionTypeDefinition ⇒ Some(DirectiveLocation.Union → "union definition") - case _: ast.UnionTypeExtensionDefinition ⇒ Some(DirectiveLocation.Union → "union extension definition") - case _: ast.EnumTypeDefinition ⇒ Some(DirectiveLocation.Enum → "enum 
definition") - case _: ast.EnumTypeExtensionDefinition ⇒ Some(DirectiveLocation.Enum → "enum extension definition") - case _: ast.EnumValueDefinition ⇒ Some(DirectiveLocation.EnumValue → "enum value definition") - case _: ast.InputObjectTypeDefinition ⇒ Some(DirectiveLocation.InputObject → "input object type definition") - case _: ast.InputObjectTypeExtensionDefinition ⇒ Some(DirectiveLocation.InputObject → "input object type extension definition") - case _: ast.InputValueDefinition ⇒ + case _: ast.SchemaDefinition => Some(DirectiveLocation.Schema -> "schema definition") + case _: ast.SchemaExtensionDefinition => Some(DirectiveLocation.Schema -> "schema extension definition") + case _: ast.ScalarTypeDefinition => Some(DirectiveLocation.Scalar -> "scalar type definition") + case _: ast.ScalarTypeExtensionDefinition => Some(DirectiveLocation.Scalar -> "scalar type extension definition") + case _: ast.ObjectTypeDefinition => Some(DirectiveLocation.Object -> "object type definition") + case _: ast.ObjectTypeExtensionDefinition => Some(DirectiveLocation.Object -> "object type extension definition") + case _: ast.FieldDefinition => Some(DirectiveLocation.FieldDefinition -> "field definition") + case _: ast.InterfaceTypeDefinition => Some(DirectiveLocation.Interface -> "interface definition") + case _: ast.InterfaceTypeExtensionDefinition => Some(DirectiveLocation.Interface -> "interface extension definition") + case _: ast.UnionTypeDefinition => Some(DirectiveLocation.Union -> "union definition") + case _: ast.UnionTypeExtensionDefinition => Some(DirectiveLocation.Union -> "union extension definition") + case _: ast.EnumTypeDefinition => Some(DirectiveLocation.Enum -> "enum definition") + case _: ast.EnumTypeExtensionDefinition => Some(DirectiveLocation.Enum -> "enum extension definition") + case _: ast.EnumValueDefinition => Some(DirectiveLocation.EnumValue -> "enum value definition") + case _: ast.InputObjectTypeDefinition => Some(DirectiveLocation.InputObject -> 
"input object type definition") + case _: ast.InputObjectTypeExtensionDefinition => Some(DirectiveLocation.InputObject -> "input object type extension definition") + case _: ast.InputValueDefinition => parent match { - case _: ast.InputObjectTypeDefinition ⇒ Some(DirectiveLocation.InputFieldDefinition → "input field definition") - case _ ⇒ Some(DirectiveLocation.ArgumentDefinition → "argument definition") + case _: ast.InputObjectTypeDefinition => Some(DirectiveLocation.InputFieldDefinition -> "input field definition") + case _ => Some(DirectiveLocation.ArgumentDefinition -> "argument definition") } - case _ ⇒ None + case _ => None } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/KnownFragmentNames.scala b/src/main/scala/sangria/validation/rules/KnownFragmentNames.scala index c1a8042f..f0d0bd1a 100644 --- a/src/main/scala/sangria/validation/rules/KnownFragmentNames.scala +++ b/src/main/scala/sangria/validation/rules/KnownFragmentNames.scala @@ -14,11 +14,11 @@ import sangria.validation._ class KnownFragmentNames extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.FragmentSpread(name, _, _, pos) ⇒ + case ast.FragmentSpread(name, _, _, pos) => ctx.doc.fragments.get(name) match { - case None ⇒ Left(Vector(UnknownFragmentViolation(name, ctx.sourceMapper, pos.toList))) - case _ ⇒ AstVisitorCommand.RightContinue + case None => Left(Vector(UnknownFragmentViolation(name, ctx.sourceMapper, pos.toList))) + case _ => AstVisitorCommand.RightContinue } } } - } \ No newline at end of file + } diff --git a/src/main/scala/sangria/validation/rules/KnownTypeNames.scala b/src/main/scala/sangria/validation/rules/KnownTypeNames.scala index 66e7c23b..83949d10 100644 --- a/src/main/scala/sangria/validation/rules/KnownTypeNames.scala +++ b/src/main/scala/sangria/validation/rules/KnownTypeNames.scala @@ -14,11 +14,11 @@ import sangria.validation._ 
class KnownTypeNames extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case _: ast.ObjectTypeDefinition | _: ast.InterfaceTypeDefinition | _: ast.UnionTypeDefinition | _: ast.InputObjectTypeDefinition | _: ast.TypeSystemExtensionDefinition | _: ast.SchemaDefinition ⇒ + case _: ast.ObjectTypeDefinition | _: ast.InterfaceTypeDefinition | _: ast.UnionTypeDefinition | _: ast.InputObjectTypeDefinition | _: ast.TypeSystemExtensionDefinition | _: ast.SchemaDefinition => // When validating SDL, at the moment schema does not know about these types. // All type names are validated in the schema materializer as new schema is constructed. AstVisitorCommand.RightSkip - case ast.NamedType(name, pos) ⇒ + case ast.NamedType(name, pos) => if (!ctx.schema.allTypes.contains(name)) Left(Vector(UnknownTypeViolation( name, @@ -29,4 +29,4 @@ class KnownTypeNames extends ValidationRule { AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/LoneAnonymousOperation.scala b/src/main/scala/sangria/validation/rules/LoneAnonymousOperation.scala index 6ee91253..636afb58 100644 --- a/src/main/scala/sangria/validation/rules/LoneAnonymousOperation.scala +++ b/src/main/scala/sangria/validation/rules/LoneAnonymousOperation.scala @@ -16,14 +16,14 @@ class LoneAnonymousOperation extends ValidationRule { var operationCount = 0 override val onEnter: ValidationVisit = { - case ast.Document(definitions, _, _, _) ⇒ + case ast.Document(definitions, _, _, _) => operationCount = definitions.count(_.isInstanceOf[ast.OperationDefinition]) AstVisitorCommand.RightContinue - case op: ast.OperationDefinition ⇒ + case op: ast.OperationDefinition => if (op.name.isEmpty && operationCount > 1) Left(Vector(AnonOperationNotAloneViolation(ctx.sourceMapper, op.location.toList))) else AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git 
a/src/main/scala/sangria/validation/rules/NoFragmentCycles.scala b/src/main/scala/sangria/validation/rules/NoFragmentCycles.scala index 348370e3..65890c92 100644 --- a/src/main/scala/sangria/validation/rules/NoFragmentCycles.scala +++ b/src/main/scala/sangria/validation/rules/NoFragmentCycles.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet, Map ⇒ MutableMap, Stack ⇒ MutableStack} +import scala.collection.mutable.{Set => MutableSet, Map => MutableMap, Stack => MutableStack} class NoFragmentCycles extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { @@ -22,21 +22,21 @@ class NoFragmentCycles extends ValidationRule { spreadPathIndexByName(fragmentDef.name) = spreadPath.size - spreadNodes.foreach { spreadNode ⇒ + spreadNodes.foreach { spreadNode => spreadPathIndexByName.get(spreadNode.name) match { - case None ⇒ + case None => spreadPath.push(spreadNode) if (!visitedFrags.contains(spreadNode.name)) { ctx.doc.fragments.get(spreadNode.name) match { - case Some(frag) ⇒ errors = errors ++ detectCycleRecursive(frag) - case _ ⇒ // do nothing + case Some(frag) => errors = errors ++ detectCycleRecursive(frag) + case _ => // do nothing } } spreadPath.pop() - case Some(cycleIndex) ⇒ + case Some(cycleIndex) => val cyclePath = spreadPath.toList.reverse.slice(cycleIndex, spreadPath.size) errors = errors :+ CycleErrorViolation( @@ -54,7 +54,7 @@ class NoFragmentCycles extends ValidationRule { } override val onEnter: ValidationVisit = { - case fragmentDef @ ast.FragmentDefinition(fragmentName, _, _, _, _, _, _, _) ⇒ + case fragmentDef @ ast.FragmentDefinition(fragmentName, _, _, _, _, _, _, _) => if (visitedFrags.contains(fragmentName)) AstVisitorCommand.RightSkip else { val errors = detectCycleRecursive(fragmentDef) @@ -63,7 +63,7 @@ class NoFragmentCycles extends ValidationRule { else AstVisitorCommand.RightContinue } - case _: 
ast.OperationDefinition ⇒ AstVisitorCommand.RightSkip + case _: ast.OperationDefinition => AstVisitorCommand.RightSkip } } - } \ No newline at end of file + } diff --git a/src/main/scala/sangria/validation/rules/NoUndefinedVariables.scala b/src/main/scala/sangria/validation/rules/NoUndefinedVariables.scala index 4a61b5b5..8bdb52a6 100644 --- a/src/main/scala/sangria/validation/rules/NoUndefinedVariables.scala +++ b/src/main/scala/sangria/validation/rules/NoUndefinedVariables.scala @@ -3,7 +3,7 @@ package sangria.validation.rules import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet} +import scala.collection.mutable.{Set => MutableSet} /** @@ -17,24 +17,24 @@ class NoUndefinedVariables extends ValidationRule { val variableNameDefined = MutableSet[String]() override val onEnter: ValidationVisit = { - case _: ast.OperationDefinition ⇒ + case _: ast.OperationDefinition => variableNameDefined.clear() AstVisitorCommand.RightContinue - case varDef: ast.VariableDefinition ⇒ + case varDef: ast.VariableDefinition => variableNameDefined += varDef.name AstVisitorCommand.RightContinue } override def onLeave: ValidationVisit = { - case operation: ast.OperationDefinition ⇒ + case operation: ast.OperationDefinition => val usages = ctx.documentAnalyzer.getRecursiveVariableUsages(operation) - val errors = usages.filterNot(vu ⇒ variableNameDefined.contains(vu.node.name)).toVector.map { vu ⇒ + val errors = usages.filterNot(vu => variableNameDefined.contains(vu.node.name)).toVector.map { vu => operation.name match { - case Some(opName) ⇒ + case Some(opName) => UndefinedVarByOpViolation(vu.node.name, opName, ctx.sourceMapper, vu.node.location.toList ++ operation.location.toList) - case None ⇒ + case None => UndefinedVarViolation(vu.node.name, ctx.sourceMapper, vu.node.location.toList ++ operation.location.toList) } } @@ -42,4 +42,4 @@ class NoUndefinedVariables extends ValidationRule { if (errors.nonEmpty) 
Left(errors.distinct) else AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/NoUnusedFragments.scala b/src/main/scala/sangria/validation/rules/NoUnusedFragments.scala index 8f954001..38b3731d 100644 --- a/src/main/scala/sangria/validation/rules/NoUnusedFragments.scala +++ b/src/main/scala/sangria/validation/rules/NoUnusedFragments.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet, ListBuffer} +import scala.collection.mutable.{Set => MutableSet, ListBuffer} /** * No unused fragments @@ -18,28 +18,28 @@ class NoUnusedFragments extends ValidationRule { val operationDefs = ListBuffer[ast.OperationDefinition]() override val onEnter: ValidationVisit = { - case od: ast.OperationDefinition ⇒ + case od: ast.OperationDefinition => operationDefs += od AstVisitorCommand.RightSkip - case fd: ast.FragmentDefinition ⇒ + case fd: ast.FragmentDefinition => fragmentDefs += fd AstVisitorCommand.RightSkip } override def onLeave: ValidationVisit = { - case ast.Document(_, _, _, _) ⇒ + case ast.Document(_, _, _, _) => val fragmentNameUsed = MutableSet[String]() - operationDefs.foreach(operation ⇒ + operationDefs.foreach(operation => ctx.documentAnalyzer.getRecursivelyReferencedFragments(operation) - .foreach(fragment ⇒ fragmentNameUsed += fragment.name)) + .foreach(fragment => fragmentNameUsed += fragment.name)) val errors = fragmentDefs.toVector - .filter(fd ⇒ !fragmentNameUsed.contains(fd.name)) - .map(fd ⇒ UnusedFragmentViolation(fd.name, ctx.sourceMapper, fd.location.toList)) + .filter(fd => !fragmentNameUsed.contains(fd.name)) + .map(fd => UnusedFragmentViolation(fd.name, ctx.sourceMapper, fd.location.toList)) if (errors.nonEmpty) Left(errors) else AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/NoUnusedVariables.scala 
b/src/main/scala/sangria/validation/rules/NoUnusedVariables.scala index 3f5fddbc..8dac990b 100644 --- a/src/main/scala/sangria/validation/rules/NoUnusedVariables.scala +++ b/src/main/scala/sangria/validation/rules/NoUnusedVariables.scala @@ -17,24 +17,24 @@ class NoUnusedVariables extends ValidationRule { val variableDefs = ListBuffer[ast.VariableDefinition]() override val onEnter: ValidationVisit = { - case _: ast.OperationDefinition ⇒ + case _: ast.OperationDefinition => variableDefs.clear() AstVisitorCommand.RightContinue - case varDef: ast.VariableDefinition ⇒ + case varDef: ast.VariableDefinition => variableDefs += varDef AstVisitorCommand.RightContinue } override def onLeave: ValidationVisit = { - case operation: ast.OperationDefinition ⇒ + case operation: ast.OperationDefinition => val usages = ctx.documentAnalyzer.getRecursiveVariableUsages(operation) val variableNameUsed = usages.map(_.node.name).toSet - val errors = variableDefs.filterNot(vd ⇒ variableNameUsed.contains(vd.name)).toVector.map(vd ⇒ + val errors = variableDefs.filterNot(vd => variableNameUsed.contains(vd.name)).toVector.map(vd => UnusedVariableViolation(vd.name, operation.name, ctx.sourceMapper, vd.location.toList)) if (errors.nonEmpty) Left(errors.distinct) else AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala b/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala index 0d18fe9f..459f4aa4 100644 --- a/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala +++ b/src/main/scala/sangria/validation/rules/OverlappingFieldsCanBeMerged.scala @@ -8,7 +8,7 @@ import sangria.ast.AstVisitorCommand import sangria.renderer.{QueryRenderer, SchemaRenderer} import sangria.schema._ import sangria.validation._ -import scala.collection.mutable.{ListBuffer, Set ⇒ MutableSet, ListMap ⇒ MutableMap, LinkedHashSet} +import scala.collection.mutable.{ListBuffer, Set 
=> MutableSet, ListMap => MutableMap, LinkedHashSet} /** * Overlapping fields can be merged @@ -84,11 +84,11 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { * */ override val onEnter: ValidationVisit = { - case selCont: ast.SelectionContainer if selCont.selections.nonEmpty ⇒ + case selCont: ast.SelectionContainer if selCont.selections.nonEmpty => val conflicts = findConflictsWithinSelectionSet(ctx.typeInfo.parentType, selCont, Set.empty) if (conflicts.nonEmpty) - Left(conflicts.toVector.map(c ⇒ FieldsConflictViolation(c.reason.fieldName, c.reason.reason, ctx.sourceMapper, (c.fields1 ++ c.fields2) flatMap (_.location)))) + Left(conflicts.toVector.map(c => FieldsConflictViolation(c.reason.fieldName, c.reason.reason, ctx.sourceMapper, (c.fields1 ++ c.fields2) flatMap (_.location)))) else AstVisitorCommand.RightContinue } @@ -106,10 +106,10 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // (B) Then collect conflicts between these fields and those represented by // each spread fragment name found. - fragmentNames.zipWithIndex foreach { case (fragmentName, idx) ⇒ + fragmentNames.zipWithIndex foreach { case (fragmentName, idx) => collectConflictsBetweenFieldsAndFragment(conflicts, fieldMap, fragmentName, false, visitedFragments + fragmentName) - for (i ← (idx + 1) until fragmentNamesList.size) + for (i <- (idx + 1) until fragmentNamesList.size) collectConflictsBetweenFragments( conflicts, fragmentName, @@ -138,17 +138,17 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // response name. For any response name which appears in both provided field // maps, each field from the first field map must be compared to every field // in the second field map to find potential conflicts. 
- fieldMap1.keys foreach { outputName ⇒ + fieldMap1.keys foreach { outputName => fieldMap2.get(outputName) match { - case Some(fields2) ⇒ + case Some(fields2) => val fields1 = fieldMap1(outputName) for { - f1 ← fields1 - f2 ← fields2 + f1 <- fields1 + f2 <- fields2 } findConflict(outputName, f1, f2, visitedFragments1, visitedFragments2, parentFieldsAreMutuallyExclusive) foreach (conflicts += _) - case None ⇒ // It's ok, do nothing + case None => // It's ok, do nothing } } } @@ -159,13 +159,13 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // name and the value at that key is a list of all fields which provide that // response name. For every response name, if there are multiple fields, they // must be compared to find a potential conflict. - fieldMap.keys foreach { outputName ⇒ + fieldMap.keys foreach { outputName => val fields = fieldMap(outputName) if (fields.size > 1) for { - i ← 0 until fields.size - j ← (i + 1) until fields.size + i <- 0 until fields.size + j <- (i + 1) until fields.size } findConflict(outputName, fields(i), fields(j), visitedFragments, visitedFragments, false) foreach (conflicts += _) } } @@ -174,17 +174,17 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { parentType: Option[CompositeType[_]], selCont: ast.SelectionContainer, visitedFragments: Set[String]): (MutableMap[String, ListBuffer[AstAndDef]], LinkedHashSet[String]) = { - val cacheKey = visitedFragments → selCont.selections + val cacheKey = visitedFragments -> selCont.selections cachedFieldsAndFragmentNames.get(cacheKey) match { - case Some(cached) ⇒ cached - case None ⇒ + case Some(cached) => cached + case None => val astAndDefs = MutableMap[String, ListBuffer[AstAndDef]]() val fragmentNames = mutable.LinkedHashSet[String]() collectFieldsAndFragmentNames(parentType, selCont, astAndDefs, fragmentNames, visitedFragments) - val cached = astAndDefs → fragmentNames + val cached = astAndDefs -> fragmentNames cachedFieldsAndFragmentNames(cacheKey) = cached cached 
@@ -194,9 +194,9 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // Given a reference to a fragment, return the represented collection of fields // as well as a list of nested fragment names referenced via fragment spreads. def getReferencedFieldsAndFragmentNames(fragment: ast.FragmentDefinition, visitedFragments: Set[String]): (MutableMap[String, ListBuffer[AstAndDef]], LinkedHashSet[String]) = { - cachedFieldsAndFragmentNames.get(visitedFragments → fragment.selections) match { - case Some(cached) ⇒ cached - case None ⇒ + cachedFieldsAndFragmentNames.get(visitedFragments -> fragment.selections) match { + case Some(cached) => cached + case None => val fragmentType = ctx.schema.getOutputType(fragment.typeCondition, true).asInstanceOf[Option[CompositeType[_]]] getFieldsAndFragmentNames(fragmentType, fragment, visitedFragments) @@ -210,15 +210,15 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { fragmentNames: MutableSet[String], visitedFragments: Set[String]): Unit = { selCont.selections foreach { - case field: ast.Field ⇒ + case field: ast.Field => val fieldDef: Option[Field[_, _]] = parentType flatMap { - case obj: ObjectLikeType[_, _] ⇒ obj.getField(ctx.schema, field.name).headOption - case _ ⇒ None + case obj: ObjectLikeType[_, _] => obj.getField(ctx.schema, field.name).headOption + case _ => None } val astAndDef = astAndDefs.get(field.outputName) match { - case Some(list) ⇒ list - case None ⇒ + case Some(list) => list + case None => val list = ListBuffer.empty[AstAndDef] astAndDefs(field.outputName) = list list @@ -226,14 +226,14 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { astAndDef += AstAndDef(field, parentType, fieldDef) - case fragment: ast.FragmentSpread if visitedFragments contains fragment.name ⇒ + case fragment: ast.FragmentSpread if visitedFragments contains fragment.name => // This means a fragment spread in itself. We're going to infinite loop // if we try and collect all fields. 
Pretend we did not index that fragment - case fragment: ast.FragmentSpread ⇒ + case fragment: ast.FragmentSpread => fragmentNames += fragment.name - case fragment: ast.InlineFragment ⇒ + case fragment: ast.InlineFragment => val inlineFragmentType = fragment.typeCondition flatMap (ctx.schema.getOutputType(_, true)) orElse parentType collectFieldsAndFragmentNames(inlineFragmentType, fragment, astAndDefs, fragmentNames, visitedFragments) @@ -259,8 +259,8 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // in the current state of the schema, then perhaps in some future version, // thus may not safely diverge. val areMutuallyExclusive = parentFieldsAreMutuallyExclusive || ((parentType1, parentType2) match { - case (Some(pt1: ObjectType[_, _]), Some(pt2: ObjectType[_, _])) if pt1.name != pt2.name ⇒ true - case _ ⇒ false + case (Some(pt1: ObjectType[_, _]), Some(pt2: ObjectType[_, _])) if pt1.name != pt2.name => true + case _ => false }) if (!areMutuallyExclusive && ast1.name != ast2.name) @@ -269,8 +269,8 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { Some(Conflict(ConflictReason(outputName, Left("they have differing arguments")), ast1 :: Nil, ast2 :: Nil)) else { val typeRes = for { - field1 ← def1 - field2 ← def2 + field1 <- def1 + field2 <- def2 } yield if (doTypesConflict(field1.fieldType, field2.fieldType)) { val type1 = SchemaRenderer.renderTypeName(field1.fieldType) val type2 = SchemaRenderer.renderTypeName(field2.fieldType) @@ -279,10 +279,10 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { } else None typeRes.flatten match { - case s @ Some(_) ⇒ s - case None ⇒ - val type1 = def1 map (d ⇒ d.fieldType.namedType) - val type2 = def2 map (d ⇒ d.fieldType.namedType) + case s @ Some(_) => s + case None => + val type1 = def1 map (d => d.fieldType.namedType) + val type2 = def2 map (d => d.fieldType.namedType) val conflicts = findConflictsBetweenSubSelectionSets( areMutuallyExclusive, 
type1.asInstanceOf[Option[CompositeType[_]]], @@ -318,20 +318,20 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // (I) Then collect conflicts between the first collection of fields and // those referenced by each fragment name associated with the second. - fragmentNames2 foreach (fragmentName ⇒ + fragmentNames2 foreach (fragmentName => collectConflictsBetweenFieldsAndFragment(conflicts, fieldMap1, fragmentName, areMutuallyExclusive, visitedFragments2 + fragmentName)) // (I) Then collect conflicts between the second collection of fields and // those referenced by each fragment name associated with the first. - fragmentNames1 foreach (fragmentName ⇒ + fragmentNames1 foreach (fragmentName => collectConflictsBetweenFieldsAndFragment(conflicts, fieldMap2, fragmentName, areMutuallyExclusive, visitedFragments1 + fragmentName)) // (J) Also collect conflicts between any fragment names by the first and // fragment names by the second. This compares each item in the first set of // names to each item in the second set of names. for { - fragmentName1 ← fragmentNames1 - fragmentName2 ← fragmentNames2 + fragmentName1 <- fragmentNames1 + fragmentName2 <- fragmentNames2 } collectConflictsBetweenFragments(conflicts, fragmentName1, fragmentName2, visitedFragments1 + fragmentName1, visitedFragments2 + fragmentName2, areMutuallyExclusive) conflicts @@ -347,14 +347,14 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { visitedFragments2: Set[String], areMutuallyExclusive: Boolean): Unit = { (ctx.doc.fragments.get(fragmentName1), ctx.doc.fragments.get(fragmentName2)) match { - case (None, _) | (_, None) ⇒ // do nothing + case (None, _) | (_, None) => // do nothing - case (Some(f1), Some(f2)) if f1.name == f2.name ⇒ + case (Some(f1), Some(f2)) if f1.name == f2.name => // No need to compare a fragment to itself. 
- case (Some(f1), Some(f2)) if comparedFragments.contains(f1.name, f2.name, areMutuallyExclusive) ⇒ + case (Some(f1), Some(f2)) if comparedFragments.contains(f1.name, f2.name, areMutuallyExclusive) => // Memoize so two fragments are not compared for conflicts more than once. - case (Some(f1), Some(f2)) ⇒ + case (Some(f1), Some(f2)) => comparedFragments.add(f1.name, f2.name, areMutuallyExclusive) val (fieldMap1, fragmentNames1) = getReferencedFieldsAndFragmentNames(f1, visitedFragments1) @@ -366,12 +366,12 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // (G) Then collect conflicts between the first fragment and any nested // fragments spread in the second fragment. - fragmentNames2 foreach (fragmentName ⇒ + fragmentNames2 foreach (fragmentName => collectConflictsBetweenFragments(conflicts, fragmentName1, fragmentName, visitedFragments1, visitedFragments2 + fragmentName, areMutuallyExclusive)) // (G) Then collect conflicts between the first fragment and any nested // fragments spread in the second fragment. - fragmentNames1 foreach (fragmentName ⇒ + fragmentNames1 foreach (fragmentName => collectConflictsBetweenFragments(conflicts, fragmentName, fragmentName2, visitedFragments1 + fragmentName, visitedFragments2, areMutuallyExclusive)) } } @@ -383,7 +383,7 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { areMutuallyExclusive: Boolean, visitedFragments: Set[String]): Unit = { ctx.doc.fragments.get(fragmentName) match { - case Some(fragment) ⇒ + case Some(fragment) => val (fieldMap2, fragmentNames2) = getReferencedFieldsAndFragmentNames(fragment, visitedFragments) // (D) First collect any conflicts between the provided collection of fields @@ -392,10 +392,10 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // (E) Then collect any conflicts between the provided collection of fields // and any fragment names found in the given fragment. 
- fragmentNames2 foreach (fragmentName ⇒ + fragmentNames2 foreach (fragmentName => collectConflictsBetweenFieldsAndFragment(conflicts, fieldMap, fragmentName, areMutuallyExclusive, visitedFragments + fragmentName)) - case None ⇒ // do nothing + case None => // do nothing } } @@ -403,8 +403,8 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { def subfieldConflicts(conflicts: Seq[Conflict], outputName: String, ast1: ast.Field, ast2: ast.Field): Option[Conflict] = if (conflicts.nonEmpty) Some(Conflict(ConflictReason(outputName, Right(conflicts map (_.reason) toVector)), - conflicts.foldLeft(ast1 :: Nil){case (acc, Conflict(_, fields, _)) ⇒ acc ++ fields}, - conflicts.foldLeft(ast2 :: Nil){case (acc, Conflict(_, _, fields)) ⇒ acc ++ fields})) + conflicts.foldLeft(ast1 :: Nil){case (acc, Conflict(_, fields, _)) => acc ++ fields}, + conflicts.foldLeft(ast2 :: Nil){case (acc, Conflict(_, _, fields)) => acc ++ fields})) else None @@ -412,21 +412,21 @@ class OverlappingFieldsCanBeMerged extends ValidationRule { // Composite types are ignored as their individual field types will be compared // later recursively. However List and Non-Null types must match. 
def doTypesConflict(type1: OutputType[_], type2: OutputType[_]): Boolean = (type1, type2) match { - case (ListType(ot1), ListType(ot2)) ⇒ doTypesConflict(ot1, ot2) - case (ListType(_), _) | (_, ListType(_)) ⇒ true - case (OptionType(ot1), OptionType(ot2)) ⇒ doTypesConflict(ot1, ot2) - case (OptionType(_), _) | (_, OptionType(_)) ⇒ true - case (nt1: LeafType, nt2: Named) ⇒ nt1.name != nt2.name - case (nt1: Named, nt2: LeafType) ⇒ nt1.name != nt2.name - case _ ⇒ false + case (ListType(ot1), ListType(ot2)) => doTypesConflict(ot1, ot2) + case (ListType(_), _) | (_, ListType(_)) => true + case (OptionType(ot1), OptionType(ot2)) => doTypesConflict(ot1, ot2) + case (OptionType(_), _) | (_, OptionType(_)) => true + case (nt1: LeafType, nt2: Named) => nt1.name != nt2.name + case (nt1: Named, nt2: LeafType) => nt1.name != nt2.name + case _ => false } def sameArguments(args1: Vector[ast.Argument], args2: Vector[ast.Argument]) = if (args1.size != args2.size) false - else args1.forall { a1 ⇒ + else args1.forall { a1 => args2.find(_.name == a1.name) match { - case Some(a2) ⇒ sameValue(a1.value, a2.value) - case None ⇒ false + case Some(a2) => sameValue(a1.value, a2.value) + case None => false } } @@ -448,13 +448,13 @@ private class PairSet[T] { private val data = MutableMap[(T, T), Boolean]() def contains(a: T, b: T, areMutuallyExclusive: Boolean) = - data get (a → b) match { - case None ⇒ false + data get (a -> b) match { + case None => false // areMutuallyExclusive being false is a superset of being true, // hence if we want to know if this PairSet "has" these two with no // exclusivity, we have to ensure it was added as such. 
- case Some(res) if !areMutuallyExclusive ⇒ !res - case Some(_) ⇒ true + case Some(res) if !areMutuallyExclusive => !res + case Some(_) => true } def add(a: T, b: T, areMutuallyExclusive: Boolean) = { @@ -463,5 +463,5 @@ private class PairSet[T] { } private def addPair(a: T, b: T, areMutuallyExclusive: Boolean) = - data(a → b) = areMutuallyExclusive + data(a -> b) = areMutuallyExclusive } diff --git a/src/main/scala/sangria/validation/rules/PossibleFragmentSpreads.scala b/src/main/scala/sangria/validation/rules/PossibleFragmentSpreads.scala index e67c67a7..79c842a5 100644 --- a/src/main/scala/sangria/validation/rules/PossibleFragmentSpreads.scala +++ b/src/main/scala/sangria/validation/rules/PossibleFragmentSpreads.scala @@ -17,10 +17,10 @@ import sangria.validation._ class PossibleFragmentSpreads extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case f: ast.InlineFragment ⇒ + case f: ast.InlineFragment => val errors = for { - tpe ← ctx.typeInfo.tpe - parent ← ctx.typeInfo.previousParentType + tpe <- ctx.typeInfo.tpe + parent <- ctx.typeInfo.previousParentType } yield if (!doTypesOverlap(ctx, tpe, parent)) Vector(TypeIncompatibleAnonSpreadViolation( @@ -32,13 +32,13 @@ class PossibleFragmentSpreads extends ValidationRule { else Vector.empty errors match { - case Some(errors) if errors.nonEmpty ⇒ Left(errors) - case _ ⇒ AstVisitorCommand.RightContinue + case Some(errors) if errors.nonEmpty => Left(errors) + case _ => AstVisitorCommand.RightContinue } - case fs: ast.FragmentSpread ⇒ + case fs: ast.FragmentSpread => val errors = for { - tpe ← ctx.typeInfo.tpe - parent ← ctx.typeInfo.previousParentType + tpe <- ctx.typeInfo.tpe + parent <- ctx.typeInfo.previousParentType } yield if (!doTypesOverlap(ctx, tpe, parent)) Vector(TypeIncompatibleSpreadViolation( @@ -51,22 +51,22 @@ class PossibleFragmentSpreads extends ValidationRule { else Vector.empty errors match { - case 
Some(errors) if errors.nonEmpty ⇒ Left(errors) - case _ ⇒ AstVisitorCommand.RightContinue + case Some(errors) if errors.nonEmpty => Left(errors) + case _ => AstVisitorCommand.RightContinue } } def doTypesOverlap(ctx: ValidationContext, type1: Type, type2: Type) = (type1, type2) match { - case (t1: Named, t2: Named) if t1.name == t2.name ⇒ true - case (t1: ObjectType[_, _], t2: ObjectType[_, _]) ⇒ false - case (t1: ObjectType[_, _], t2: AbstractType) ⇒ + case (t1: Named, t2: Named) if t1.name == t2.name => true + case (t1: ObjectType[_, _], t2: ObjectType[_, _]) => false + case (t1: ObjectType[_, _], t2: AbstractType) => ctx.schema.isPossibleType(t2.name, t1) - case (t1: AbstractType, t2: ObjectType[_, _]) ⇒ + case (t1: AbstractType, t2: ObjectType[_, _]) => ctx.schema.isPossibleType(t1.name, t2) - case (t1: AbstractType, t2: Named) ⇒ + case (t1: AbstractType, t2: Named) => val t1TypeNames = ctx.schema.possibleTypes(t1.name).map(_.name).toSet - ctx.schema possibleTypes t2.name exists (t ⇒ t1TypeNames.contains(t.name)) - case _ ⇒ false + ctx.schema possibleTypes t2.name exists (t => t1TypeNames.contains(t.name)) + case _ => false } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/ProvidedRequiredArguments.scala b/src/main/scala/sangria/validation/rules/ProvidedRequiredArguments.scala index 0ac9b85b..1fe0f1df 100644 --- a/src/main/scala/sangria/validation/rules/ProvidedRequiredArguments.scala +++ b/src/main/scala/sangria/validation/rules/ProvidedRequiredArguments.scala @@ -15,28 +15,28 @@ import sangria.validation._ class ProvidedRequiredArguments extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onLeave: ValidationVisit = { - case ast.Field(_, name, args, _, _, _, _, pos) ⇒ + case ast.Field(_, name, args, _, _, _, _, pos) => ctx.typeInfo.fieldDef match { - case None ⇒ AstVisitorCommand.RightContinue - case Some(fieldDef) ⇒ + case None => 
AstVisitorCommand.RightContinue + case Some(fieldDef) => val astArgs = args.map(_.name).toSet val errors = fieldDef.arguments.toVector.collect { - case argDef if !astArgs.contains(argDef.name) && !argDef.argumentType.isOptional && argDef.defaultValue.isEmpty ⇒ + case argDef if !astArgs.contains(argDef.name) && !argDef.argumentType.isOptional && argDef.defaultValue.isEmpty => MissingFieldArgViolation(name, argDef.name, SchemaRenderer.renderTypeName(argDef.argumentType), ctx.sourceMapper, pos.toList) } if (errors.nonEmpty) Left(errors) else AstVisitorCommand.RightContinue } - case ast.Directive(name, args, _, pos) ⇒ + case ast.Directive(name, args, _, pos) => ctx.typeInfo.directive match { - case None ⇒ AstVisitorCommand.RightContinue - case Some(dirDef) ⇒ + case None => AstVisitorCommand.RightContinue + case Some(dirDef) => val astArgs = args.map(_.name).toSet val errors = dirDef.arguments.toVector.collect { - case argDef if !astArgs.contains(argDef.name) && !argDef.argumentType.isOptional && argDef.defaultValue.isEmpty ⇒ + case argDef if !astArgs.contains(argDef.name) && !argDef.argumentType.isOptional && argDef.defaultValue.isEmpty => MissingFieldArgViolation(name, argDef.name, SchemaRenderer.renderTypeName(argDef.argumentType), ctx.sourceMapper, pos.toList) } @@ -44,4 +44,4 @@ class ProvidedRequiredArguments extends ValidationRule { } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/ScalarLeafs.scala b/src/main/scala/sangria/validation/rules/ScalarLeafs.scala index 01adc1e5..4a57442b 100644 --- a/src/main/scala/sangria/validation/rules/ScalarLeafs.scala +++ b/src/main/scala/sangria/validation/rules/ScalarLeafs.scala @@ -16,18 +16,18 @@ import sangria.validation._ class ScalarLeafs extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.Field(_, name, _, _, sels, _, _, pos) ⇒ + case ast.Field(_, name, _, _, sels, _, _, pos) => 
ctx.typeInfo.tpe match { - case Some(fieldType) ⇒ + case Some(fieldType) => fieldType.namedType match { - case tpe if tpe.isInstanceOf[LeafType] && sels.nonEmpty ⇒ + case tpe if tpe.isInstanceOf[LeafType] && sels.nonEmpty => Left(Vector(NoSubselectionAllowedViolation(name, SchemaRenderer.renderTypeName(tpe, true), ctx.sourceMapper, pos.toList))) - case tpe if !tpe.isInstanceOf[LeafType] && sels.isEmpty ⇒ + case tpe if !tpe.isInstanceOf[LeafType] && sels.isEmpty => Left(Vector(RequiredSubselectionViolation(name, SchemaRenderer.renderTypeName(fieldType, false), ctx.sourceMapper, pos.toList))) - case _ ⇒ AstVisitorCommand.RightContinue + case _ => AstVisitorCommand.RightContinue } - case None ⇒ AstVisitorCommand.RightContinue + case None => AstVisitorCommand.RightContinue } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/SingleFieldSubscriptions.scala b/src/main/scala/sangria/validation/rules/SingleFieldSubscriptions.scala index af089f15..576f7a9a 100644 --- a/src/main/scala/sangria/validation/rules/SingleFieldSubscriptions.scala +++ b/src/main/scala/sangria/validation/rules/SingleFieldSubscriptions.scala @@ -12,8 +12,8 @@ import sangria.validation._ class SingleFieldSubscriptions extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case od: ast.OperationDefinition if od.operationType == OperationType.Subscription && od.selections.size > 1 ⇒ + case od: ast.OperationDefinition if od.operationType == OperationType.Subscription && od.selections.size > 1 => Left(Vector(SubscriptionSingleFieldOnlyViolation(od.name, ctx.sourceMapper, od.selections.tail.flatMap(_.location).toList))) } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueArgumentNames.scala b/src/main/scala/sangria/validation/rules/UniqueArgumentNames.scala index 3ed5f27f..f457e77e 100644 --- 
a/src/main/scala/sangria/validation/rules/UniqueArgumentNames.scala +++ b/src/main/scala/sangria/validation/rules/UniqueArgumentNames.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet} +import scala.collection.mutable.{Set => MutableSet} /** * Unique argument names @@ -17,13 +17,13 @@ class UniqueArgumentNames extends ValidationRule { val knownArgNames = MutableSet[String]() override val onEnter: ValidationVisit = { - case _: ast.Field ⇒ + case _: ast.Field => knownArgNames.clear() AstVisitorCommand.RightContinue - case _: ast.Directive ⇒ + case _: ast.Directive => knownArgNames.clear() AstVisitorCommand.RightContinue - case ast.Argument(name, _, _, pos) ⇒ + case ast.Argument(name, _, _, pos) => if (knownArgNames contains name) Left(Vector(DuplicateArgNameViolation(name, ctx.sourceMapper, pos.toList))) else { @@ -32,4 +32,4 @@ class UniqueArgumentNames extends ValidationRule { } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueDirectivesPerLocation.scala b/src/main/scala/sangria/validation/rules/UniqueDirectivesPerLocation.scala index 8124c72f..ea419529 100644 --- a/src/main/scala/sangria/validation/rules/UniqueDirectivesPerLocation.scala +++ b/src/main/scala/sangria/validation/rules/UniqueDirectivesPerLocation.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Map ⇒ MutableMap} +import scala.collection.mutable.{Map => MutableMap} /** * Unique directive names per location @@ -18,13 +18,13 @@ class UniqueDirectivesPerLocation extends ValidationRule { // Many different AST nodes may contain directives. Rather than listing // them all, just listen for entering any node, and check to see if it // defines any directives. 
- case node: ast.WithDirectives ⇒ + case node: ast.WithDirectives => val knownDirectives = MutableMap[String, ast.Directive]() val errors = node.directives.foldLeft(Vector.empty[Violation]) { - case (errors, d) if knownDirectives contains d.name ⇒ + case (errors, d) if knownDirectives contains d.name => errors :+ DuplicateDirectiveViolation(d.name, ctx.sourceMapper, knownDirectives(d.name).location.toList ++ d.location.toList ) - case (errors, d) ⇒ + case (errors, d) => knownDirectives(d.name) = d errors } @@ -33,4 +33,4 @@ class UniqueDirectivesPerLocation extends ValidationRule { else AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueFragmentNames.scala b/src/main/scala/sangria/validation/rules/UniqueFragmentNames.scala index 7344a9d3..bfe07591 100644 --- a/src/main/scala/sangria/validation/rules/UniqueFragmentNames.scala +++ b/src/main/scala/sangria/validation/rules/UniqueFragmentNames.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet} +import scala.collection.mutable.{Set => MutableSet} /** * Unique fragment names @@ -16,7 +16,7 @@ class UniqueFragmentNames extends ValidationRule { val knownFragmentNames = MutableSet[String]() override val onEnter: ValidationVisit = { - case fragDef: ast.FragmentDefinition ⇒ + case fragDef: ast.FragmentDefinition => if (knownFragmentNames contains fragDef.name) Left(Vector(DuplicateFragmentNameViolation(fragDef.name, ctx.sourceMapper, fragDef.location.toList))) else { @@ -25,4 +25,4 @@ class UniqueFragmentNames extends ValidationRule { } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueInputFieldNames.scala b/src/main/scala/sangria/validation/rules/UniqueInputFieldNames.scala index 8a816310..afdefde9 100644 --- a/src/main/scala/sangria/validation/rules/UniqueInputFieldNames.scala +++ 
b/src/main/scala/sangria/validation/rules/UniqueInputFieldNames.scala @@ -2,7 +2,7 @@ package sangria.validation.rules import sangria.ast.AstLocation -import scala.collection.mutable.{Map ⇒ MutableMap} +import scala.collection.mutable.{Map => MutableMap} import sangria.ast import sangria.ast.AstVisitorCommand @@ -20,26 +20,26 @@ class UniqueInputFieldNames extends ValidationRule { var knownNames = MutableMap[String, Option[AstLocation]]() override val onEnter: ValidationVisit = { - case ast.ObjectValue(_, _, _) ⇒ + case ast.ObjectValue(_, _, _) => knownNameStack.push(knownNames) knownNames = MutableMap[String, Option[AstLocation]]() AstVisitorCommand.RightContinue - case ast.ObjectField(name, _, _, pos) ⇒ + case ast.ObjectField(name, _, _, pos) => if (knownNames contains name) Left(Vector(DuplicateInputFieldViolation(name, ctx.sourceMapper, knownNames(name).toList ++ pos.toList))) else { - knownNames += name → pos + knownNames += name -> pos AstVisitorCommand.RightContinue } } override def onLeave: ValidationVisit = { - case ast.ObjectValue(_, _, _) ⇒ + case ast.ObjectValue(_, _, _) => knownNames = knownNameStack.pop() AstVisitorCommand.RightContinue } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueOperationNames.scala b/src/main/scala/sangria/validation/rules/UniqueOperationNames.scala index 7159d477..95ef1d98 100644 --- a/src/main/scala/sangria/validation/rules/UniqueOperationNames.scala +++ b/src/main/scala/sangria/validation/rules/UniqueOperationNames.scala @@ -4,7 +4,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Set ⇒ MutableSet} +import scala.collection.mutable.{Set => MutableSet} /** * Unique operation names @@ -16,7 +16,7 @@ class UniqueOperationNames extends ValidationRule { val knownOpNames = MutableSet[String]() override val onEnter: ValidationVisit = { - case ast.OperationDefinition(_, Some(name), _, _, _, _, _, pos) ⇒ + case 
ast.OperationDefinition(_, Some(name), _, _, _, _, _, pos) => if (knownOpNames contains name) Left(Vector(DuplicateOperationNameViolation(name, ctx.sourceMapper, pos.toList))) else { @@ -25,4 +25,4 @@ class UniqueOperationNames extends ValidationRule { } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/UniqueVariableNames.scala b/src/main/scala/sangria/validation/rules/UniqueVariableNames.scala index 7992abdc..fd628596 100644 --- a/src/main/scala/sangria/validation/rules/UniqueVariableNames.scala +++ b/src/main/scala/sangria/validation/rules/UniqueVariableNames.scala @@ -5,7 +5,7 @@ import sangria.ast import sangria.ast.AstVisitorCommand import sangria.validation._ -import scala.collection.mutable.{Map ⇒ MutableMap} +import scala.collection.mutable.{Map => MutableMap} /** * Unique variable names @@ -17,16 +17,16 @@ class UniqueVariableNames extends ValidationRule { val knownVariableNames = MutableMap[String, List[AstLocation]]() override val onEnter: ValidationVisit = { - case _: ast.OperationDefinition ⇒ + case _: ast.OperationDefinition => knownVariableNames.clear() AstVisitorCommand.RightContinue - case ast.VariableDefinition(name, _, _, _, _, pos) ⇒ + case ast.VariableDefinition(name, _, _, _, _, pos) => knownVariableNames get name match { - case Some(otherPos) ⇒ + case Some(otherPos) => Left(Vector(DuplicateVariableViolation(name, ctx.sourceMapper, otherPos ++ pos.toList))) - case None ⇒ - knownVariableNames += name → pos.toList + case None => + knownVariableNames += name -> pos.toList AstVisitorCommand.RightContinue } } diff --git a/src/main/scala/sangria/validation/rules/ValuesOfCorrectType.scala b/src/main/scala/sangria/validation/rules/ValuesOfCorrectType.scala index 052f3b52..a535a3ef 100644 --- a/src/main/scala/sangria/validation/rules/ValuesOfCorrectType.scala +++ b/src/main/scala/sangria/validation/rules/ValuesOfCorrectType.scala @@ -17,50 +17,50 @@ import sangria.util.StringUtil class ValuesOfCorrectType 
extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case v: ast.NullValue ⇒ + case v: ast.NullValue => ctx.typeInfo.inputType match { - case Some(tpe) if !tpe.isOptional ⇒ badValue(tpe, v) - case _ ⇒ RightContinue + case Some(tpe) if !tpe.isOptional => badValue(tpe, v) + case _ => RightContinue } - case v: ast.ListValue ⇒ + case v: ast.ListValue => // Note: TypeInfo will traverse into a list's item type, so look to the parent input type to check if it is a list. ctx.typeInfo.parentInputType match { - case Some(tpe) if !tpe.nonOptionalType.isList ⇒ + case Some(tpe) if !tpe.nonOptionalType.isList => isValidScalar(v) match { - case Right(_) ⇒ Right(AstVisitorCommand.Skip) - case l @ Left(_) ⇒ l + case Right(_) => Right(AstVisitorCommand.Skip) + case l @ Left(_) => l } - case _ ⇒ RightContinue + case _ => RightContinue } - case v: ast.ObjectValue ⇒ + case v: ast.ObjectValue => ctx.typeInfo.inputType.map(_.namedType) match { - case Some(tpe: InputObjectType[_]) ⇒ + case Some(tpe: InputObjectType[_]) => val errors = - tpe.fields.toVector.flatMap { fieldDef ⇒ + tpe.fields.toVector.flatMap { fieldDef => v.fieldsByName.get(fieldDef.name) match { - case None if !fieldDef.fieldType.isOptional && fieldDef.defaultValue.isEmpty ⇒ + case None if !fieldDef.fieldType.isOptional && fieldDef.defaultValue.isEmpty => Vector(RequiredFieldViolation(tpe.name, fieldDef.name, SchemaRenderer.renderTypeName(fieldDef.fieldType), ctx.sourceMapper, v.location.toList)) - case _ ⇒ Vector.empty + case _ => Vector.empty } } if (errors.nonEmpty) Left(errors) else RightContinue - case Some(_) ⇒ + case Some(_) => isValidScalar(v) match { - case Right(_) ⇒ Right(AstVisitorCommand.Skip) - case l @ Left(_) ⇒ l + case Right(_) => Right(AstVisitorCommand.Skip) + case l @ Left(_) => l } - case _ ⇒ RightContinue + case _ => RightContinue } - case v: ast.ObjectField ⇒ + case v: ast.ObjectField => 
(ctx.typeInfo.parentInputType.map(_.namedType), ctx.typeInfo.inputType) match { - case (Some(tpe: InputObjectType[_]), None) ⇒ + case (Some(tpe: InputObjectType[_]), None) => val suggestions = StringUtil.suggestionList(v.name, tpe.fields.map(_.name)) val didYouMean = if (suggestions.nonEmpty) Some(s"Did you mean ${StringUtil.orList(suggestions)}?") @@ -68,26 +68,26 @@ class ValuesOfCorrectType extends ValidationRule { Left(Vector(UnknownFieldViolation(tpe.name, v.name, didYouMean, ctx.sourceMapper, v.location.toList))) - case _ ⇒ RightContinue + case _ => RightContinue } - case v: ast.EnumValue ⇒ + case v: ast.EnumValue => ctx.typeInfo.inputType.map(_.namedType) match { - case Some(tpe: EnumType[_]) ⇒ + case Some(tpe: EnumType[_]) => tpe.coerceInput(v) match { - case Left(violation) ⇒ badValue(tpe, v, Some(violation)) - case _ ⇒ RightContinue + case Left(violation) => badValue(tpe, v, Some(violation)) + case _ => RightContinue } - case _ ⇒ isValidScalar(v) + case _ => isValidScalar(v) } - case v: ast.IntValue ⇒ isValidScalar(v) - case v: ast.BigIntValue ⇒ isValidScalar(v) - case v: ast.FloatValue ⇒ isValidScalar(v) - case v: ast.BigDecimalValue ⇒ isValidScalar(v) - case v: ast.StringValue ⇒ isValidScalar(v) - case v: ast.BooleanValue ⇒ isValidScalar(v) + case v: ast.IntValue => isValidScalar(v) + case v: ast.BigIntValue => isValidScalar(v) + case v: ast.FloatValue => isValidScalar(v) + case v: ast.BigDecimalValue => isValidScalar(v) + case v: ast.StringValue => isValidScalar(v) + case v: ast.BooleanValue => isValidScalar(v) } def badValue(tpe: Type, node: ast.AstNode, violation: Option[Violation] = None) = @@ -103,40 +103,40 @@ class ValuesOfCorrectType extends ValidationRule { )) def enumTypeSuggestion(tpe: Type, node: ast.Value): Option[Violation] = tpe match { - case enum: EnumType[_] ⇒ + case enum: EnumType[_] => val name = QueryRenderer.render(node) val suggestions = StringUtil.suggestionList(name, enum.values.map(_.name)) if (suggestions.nonEmpty) 
Some(EnumValueCoercionViolation(name, enum.name, suggestions)) else None - case _ ⇒ None + case _ => None } def isValidScalar(value: ast.Value) = ctx.typeInfo.inputType match { - case Some(tpe) ⇒ + case Some(tpe) => tpe.namedInputType match { - case s: ScalarType[_] ⇒ + case s: ScalarType[_] => s.coerceInput(value) match { - case Left(violation) ⇒ badValue(tpe, value, Some(violation)) - case _ ⇒ RightContinue + case Left(violation) => badValue(tpe, value, Some(violation)) + case _ => RightContinue } - case s: ScalarAlias[_, _] ⇒ + case s: ScalarAlias[_, _] => s.aliasFor.coerceInput(value) match { - case Left(violation) ⇒ badValue(tpe, value, Some(violation)) - case Right(v) ⇒ s.fromScalar(v) match { - case Left(violation) ⇒ badValue(tpe, value, Some(violation)) - case _ ⇒ RightContinue + case Left(violation) => badValue(tpe, value, Some(violation)) + case Right(v) => s.fromScalar(v) match { + case Left(violation) => badValue(tpe, value, Some(violation)) + case _ => RightContinue } } - case t ⇒ + case t => badValue(tpe, value, enumTypeSuggestion(t, value)) } - case _ ⇒ RightContinue + case _ => RightContinue } } } diff --git a/src/main/scala/sangria/validation/rules/VariablesAreInputTypes.scala b/src/main/scala/sangria/validation/rules/VariablesAreInputTypes.scala index acdae008..d1b0847d 100644 --- a/src/main/scala/sangria/validation/rules/VariablesAreInputTypes.scala +++ b/src/main/scala/sangria/validation/rules/VariablesAreInputTypes.scala @@ -15,12 +15,12 @@ import sangria.validation._ class VariablesAreInputTypes extends ValidationRule { override def visitor(ctx: ValidationContext) = new AstValidatingVisitor { override val onEnter: ValidationVisit = { - case ast.VariableDefinition(name, tpe, _, _, _, pos) ⇒ + case ast.VariableDefinition(name, tpe, _, _, _, pos) => ctx.schema.getInputType(tpe) match { - case Some(_) ⇒ AstVisitorCommand.RightContinue - case None ⇒ Left(Vector( + case Some(_) => AstVisitorCommand.RightContinue + case None => Left(Vector( 
NonInputTypeOnVarViolation(name, QueryRenderer.render(tpe), ctx.sourceMapper, tpe.location.toList))) } } } -} \ No newline at end of file +} diff --git a/src/main/scala/sangria/validation/rules/VariablesInAllowedPosition.scala b/src/main/scala/sangria/validation/rules/VariablesInAllowedPosition.scala index 01fea2c7..2a7ee22d 100644 --- a/src/main/scala/sangria/validation/rules/VariablesInAllowedPosition.scala +++ b/src/main/scala/sangria/validation/rules/VariablesInAllowedPosition.scala @@ -18,17 +18,17 @@ class VariablesInAllowedPosition extends ValidationRule { val varDefs = MutableMap[String, ast.VariableDefinition]() override val onEnter: ValidationVisit = { - case _: ast.OperationDefinition ⇒ + case _: ast.OperationDefinition => varDefs.clear() AstVisitorCommand.RightContinue - case varDef: ast.VariableDefinition ⇒ + case varDef: ast.VariableDefinition => varDefs(varDef.name) = varDef AstVisitorCommand.RightContinue } override def onLeave: ValidationVisit = { - case operation: ast.OperationDefinition ⇒ + case operation: ast.OperationDefinition => val usages = ctx.documentAnalyzer.getRecursiveVariableUsages(operation) // A var type is allowed if it is the same or more strict (e.g. is @@ -36,11 +36,11 @@ class VariablesInAllowedPosition extends ValidationRule { // the variable type is non-null when the expected type is nullable. // If both are list types, the variable item type can be more strict // than the expected item type (contravariant). 
- val errors = usages.toVector.flatMap { usage ⇒ + val errors = usages.toVector.flatMap { usage => for { - varDef ← varDefs.get(usage.node.name) - tpe ← usage.tpe - inputTpe ← ctx.schema.getInputType(varDef.tpe) + varDef <- varDefs.get(usage.node.name) + tpe <- usage.tpe + inputTpe <- ctx.schema.getInputType(varDef.tpe) if !allowedVariableUsage(ctx.schema, inputTpe, varDef.defaultValue, tpe, usage.defaultValue) } yield BadVarPositionViolation( usage.node.name, @@ -66,11 +66,11 @@ class VariablesInAllowedPosition extends ValidationRule { locationDefaultValue: Option[(_, ToInput[_, _])] ) = if (!locationType.isOptional && varType.isOptional) { - val hasNonNullVariableDefaultValue = varDefaultValue.exists(default ⇒ !default.isInstanceOf[ast.NullValue]) + val hasNonNullVariableDefaultValue = varDefaultValue.exists(default => !default.isInstanceOf[ast.NullValue]) val hasLocationDefaultValue = locationDefaultValue.isDefined if (!hasNonNullVariableDefaultValue && !hasLocationDefaultValue) false else TypeComparators.isSubType(schema, varType.nonOptionalType, locationType) } else TypeComparators.isSubType(schema, varType, locationType) } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/ActionMapSpec.scala b/src/test/scala/sangria/execution/ActionMapSpec.scala index 91889393..6af98e93 100644 --- a/src/test/scala/sangria/execution/ActionMapSpec.scala +++ b/src/test/scala/sangria/execution/ActionMapSpec.scala @@ -17,7 +17,7 @@ class ActionMapSpec extends WordSpec with Matchers with FutureResultSupport { class ColorResolver extends DeferredResolver[Any] { override def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case ColorDefer(num) ⇒ Future.successful("[" + (num + 45) + "]") + case ColorDefer(num) => Future.successful("[" + (num + 45) + "]") } } @@ -27,36 +27,36 @@ class ActionMapSpec extends WordSpec with Matchers with FutureResultSupport { case class SimpleError(message: 
String) extends Exception(message) with UserFacingError val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("value", StringType, resolve = _ ⇒ + Field("value", StringType, resolve = _ => Value("red").map("light-" + _)), - Field("doubleMap", StringType, resolve = _ ⇒ + Field("doubleMap", StringType, resolve = _ => Value("red").map("light-" + _).map(_ + "-color")), - Field("future", StringType, resolve = _ ⇒ + Field("future", StringType, resolve = _ => FutureValue(Future.successful("green")).map("light-" + _)), - Field("futureDouble", ColorType, resolve = _ ⇒ + Field("futureDouble", ColorType, resolve = _ => FutureValue(Future.successful("green")).map("light-" + _).map(Color(_))), - Field("futureTriple", StringType, resolve = _ ⇒ + Field("futureTriple", StringType, resolve = _ => FutureValue(Future.successful("green")).map("light-" + _).map(Color(_)).map("super-" + _.name)), - Field("deferred", StringType, resolve = _ ⇒ - DeferredValue(ColorDefer(123)).map(x ⇒ x + 345)), - Field("deferredPartialError", StringType, resolve = _ ⇒ + Field("deferred", StringType, resolve = _ => + DeferredValue(ColorDefer(123)).map(x => x + 345)), + Field("deferredPartialError", StringType, resolve = _ => DeferredValue(ColorDefer(123)) - .mapWithErrors(x ⇒ (x + 10, Vector(SimpleError("ooops"), SimpleError("something went wrong")))) - .map(x ⇒ x + "foo") - .mapWithErrors(x ⇒ (x + 23, Vector(SimpleError("mo errors"))))), - Field("futureDeferred", StringType, resolve = _ ⇒ - DeferredFutureValue(Future.successful(ColorDefer(34))).map(x ⇒ x + 56)), - Field("futureDeferredPartialError", StringType, resolve = _ ⇒ + .mapWithErrors(x => (x + 10, Vector(SimpleError("ooops"), SimpleError("something went wrong")))) + .map(x => x + "foo") + .mapWithErrors(x => (x + 23, Vector(SimpleError("mo errors"))))), + Field("futureDeferred", StringType, resolve = _ => + DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 56)), + Field("futureDeferredPartialError", StringType, resolve 
= _ => DeferredFutureValue(Future.successful(ColorDefer(34))) - .mapWithErrors(x ⇒ (x + 10, Vector(SimpleError("ooops"), SimpleError("something went wrong")))) - .map(x ⇒ x + "foo") - .mapWithErrors(x ⇒ (x + 23, Vector(SimpleError("mo errors"))))), - Field("futureDeferredDouble", StringType, resolve = _ ⇒ - DeferredFutureValue(Future.successful(ColorDefer(34))).map(x ⇒ x + 576).map("Yay! " + _ + " +++")), - Field("futureDeferredTriple", StringType, resolve = _ ⇒ - DeferredFutureValue(Future.successful(ColorDefer(34))).map(x ⇒ x + 576).map(Color(_)).map(c ⇒ "Yay! " + c.name + " +++")), - Field("ctxUpdate", ColorType, resolve = ctx ⇒ - UpdateCtx(DeferredFutureValue(Future.successful(ColorDefer(11)))){v ⇒ require(v == "[56]"); ctx.ctx}.map("!" + _ + "?").map(x ⇒ x + 576).map(Color(_)).map(c ⇒ "(" + c.name + ")").map(Color(_))) + .mapWithErrors(x => (x + 10, Vector(SimpleError("ooops"), SimpleError("something went wrong")))) + .map(x => x + "foo") + .mapWithErrors(x => (x + 23, Vector(SimpleError("mo errors"))))), + Field("futureDeferredDouble", StringType, resolve = _ => + DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 576).map("Yay! " + _ + " +++")), + Field("futureDeferredTriple", StringType, resolve = _ => + DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 576).map(Color(_)).map(c => "Yay! " + c.name + " +++")), + Field("ctxUpdate", ColorType, resolve = ctx => + UpdateCtx(DeferredFutureValue(Future.successful(ColorDefer(11)))){v => require(v == "[56]"); ctx.ctx}.map("!" 
+ _ + "?").map(x => x + 576).map(Color(_)).map(c => "(" + c.name + ")").map(Color(_))) )) val schema = Schema(QueryType) @@ -79,17 +79,17 @@ class ActionMapSpec extends WordSpec with Matchers with FutureResultSupport { """) Executor.execute(schema, doc, deferredResolver = new ColorResolver).await should be (Map( - "data" → Map( - "value" → "light-red", - "doubleMap" → "light-red-color", - "future" → "light-green", - "futureDouble" → Map("name" → "light-green"), - "futureTriple" → "super-light-green", - "deferred" → "[168]345", - "futureDeferred" → "[79]56", - "futureDeferredDouble" → "Yay! [79]576 +++", - "futureDeferredTriple" → "Yay! [79]576 +++", - "ctxUpdate" → Map("name" → "(![56]?576)") + "data" -> Map( + "value" -> "light-red", + "doubleMap" -> "light-red-color", + "future" -> "light-green", + "futureDouble" -> Map("name" -> "light-green"), + "futureTriple" -> "super-light-green", + "deferred" -> "[168]345", + "futureDeferred" -> "[79]56", + "futureDeferredDouble" -> "Yay! [79]576 +++", + "futureDeferredTriple" -> "Yay! 
[79]576 +++", + "ctxUpdate" -> Map("name" -> "(![56]?576)") ))) } @@ -102,34 +102,34 @@ class ActionMapSpec extends WordSpec with Matchers with FutureResultSupport { """) Executor.execute(schema, doc, deferredResolver = new ColorResolver).await should be (Map( - "data" → Map( - "deferredPartialError" → "[168]10foo23", - "futureDeferredPartialError" → "[79]10foo23"), - "errors" → Vector( + "data" -> Map( + "deferredPartialError" -> "[168]10foo23", + "futureDeferredPartialError" -> "[79]10foo23"), + "errors" -> Vector( Map( - "message" → "ooops", - "path" → Vector("deferredPartialError"), - "locations" → Vector(Map("line" → 3, "column" → 11))), + "message" -> "ooops", + "path" -> Vector("deferredPartialError"), + "locations" -> Vector(Map("line" -> 3, "column" -> 11))), Map( - "message" → "something went wrong", - "path" → Vector("deferredPartialError"), - "locations" → Vector(Map("line" → 3, "column" → 11))), + "message" -> "something went wrong", + "path" -> Vector("deferredPartialError"), + "locations" -> Vector(Map("line" -> 3, "column" -> 11))), Map( - "message" → "mo errors", - "path" → Vector("deferredPartialError"), - "locations" → Vector(Map("line" → 3, "column" → 11))), + "message" -> "mo errors", + "path" -> Vector("deferredPartialError"), + "locations" -> Vector(Map("line" -> 3, "column" -> 11))), Map( - "message" → "ooops", - "path" → Vector("futureDeferredPartialError"), - "locations" → Vector(Map("line" → 4, "column" → 11))), + "message" -> "ooops", + "path" -> Vector("futureDeferredPartialError"), + "locations" -> Vector(Map("line" -> 4, "column" -> 11))), Map( - "message" → "something went wrong", - "path" → Vector("futureDeferredPartialError"), - "locations" → Vector(Map("line" → 4, "column" → 11))), + "message" -> "something went wrong", + "path" -> Vector("futureDeferredPartialError"), + "locations" -> Vector(Map("line" -> 4, "column" -> 11))), Map( - "message" → "mo errors", - "path" → Vector("futureDeferredPartialError"), - "locations" → 
Vector(Map("line" → 4, "column" → 11)))))) + "message" -> "mo errors", + "path" -> Vector("futureDeferredPartialError"), + "locations" -> Vector(Map("line" -> 4, "column" -> 11)))))) } } } diff --git a/src/test/scala/sangria/execution/ContextPassingSpec.scala b/src/test/scala/sangria/execution/ContextPassingSpec.scala index 430ddee8..e2bc711a 100644 --- a/src/test/scala/sangria/execution/ContextPassingSpec.scala +++ b/src/test/scala/sangria/execution/ContextPassingSpec.scala @@ -19,7 +19,7 @@ class ContextPassingSpec extends WordSpec with Matchers with FutureResultSupport } trait PersonComponent { - this: NameComponent ⇒ + this: NameComponent => def fullName = name + " bar" } @@ -35,14 +35,14 @@ class ContextPassingSpec extends WordSpec with Matchers with FutureResultSupport val PersonType = ObjectType("Person", fields[PersonComponent, Unit]( Field("fullName", StringType, resolve = _.ctx.fullName), - Field("name", NameType, resolve = _ ⇒ ()))) + Field("name", NameType, resolve = _ => ()))) def colorField[Ctx <: ColorComponent with NameComponent] = - Field("color", ColorType, None, resolve = (ctx: Context[Ctx, Unit]) ⇒ ()) + Field("color", ColorType, None, resolve = (ctx: Context[Ctx, Unit]) => ()) val QueryType = ObjectType("Query", fields[Cake, Unit]( colorField, - Field("person", PersonType, resolve = _ ⇒ ()) + Field("person", PersonType, resolve = _ => ()) )) val schema = Schema(QueryType) @@ -60,13 +60,13 @@ class ContextPassingSpec extends WordSpec with Matchers with FutureResultSupport """) Executor.execute(schema, doc, userContext = new Cake).await should be (Map( - "data" → Map( - "color" → Map( - "name" → "foo", - "colorName" → "green"), - "person" → Map( - "name" → Map("name" → "foo"), - "fullName" → "foo bar")))) + "data" -> Map( + "color" -> Map( + "name" -> "foo", + "colorName" -> "green"), + "person" -> Map( + "name" -> Map("name" -> "foo"), + "fullName" -> "foo bar")))) } } diff --git a/src/test/scala/sangria/execution/DeprecationTrackerSpec.scala 
b/src/test/scala/sangria/execution/DeprecationTrackerSpec.scala index 66d154eb..d755eaaf 100644 --- a/src/test/scala/sangria/execution/DeprecationTrackerSpec.scala +++ b/src/test/scala/sangria/execution/DeprecationTrackerSpec.scala @@ -32,8 +32,8 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup "DeprecationTracker" should { "not track non-deprecated fields" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) @@ -48,8 +48,8 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup "track deprecated fields" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) @@ -64,10 +64,10 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup } "provide context information" in { - lazy val testType: ObjectType[Unit, Unit] = ObjectType("TestType", () ⇒ fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None), - Field("nested", OptionType(testType), resolve = _ ⇒ Some(())) + lazy val testType: ObjectType[Unit, Unit] = ObjectType("TestType", () => 
fields[Unit, Unit]( + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None), + Field("nested", OptionType(testType), resolve = _ => Some(())) )) val schema = Schema(testType) @@ -83,12 +83,12 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup } "report usage even if field is defined only in the interface type" in { - val testInt = InterfaceType("TestInterface", () ⇒ fields[Unit, Unit]( - Field("foo", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + val testInt = InterfaceType("TestInterface", () => fields[Unit, Unit]( + Field("foo", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val testType = ObjectType("TestType", interfaces[Unit, Unit](testInt), fields[Unit, Unit]( - Field("foo", OptionType(StringType), resolve = _ ⇒ None) + Field("foo", OptionType(StringType), resolve = _ => None) )) val schema = Schema(testType) @@ -112,7 +112,7 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup val testType = ObjectType("TestType", fields[Unit, Unit]( Field("testEnum", OptionType(StringType), arguments = Argument("foo", testEnum) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val schema = Schema(testType) @@ -143,7 +143,7 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup val testType = ObjectType("TestType", fields[Unit, Unit]( Field("testEnum", OptionType(StringType), arguments = Argument("foo", testEnum) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val schema = Schema(testType) @@ -169,8 +169,8 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup "NilDeprecationTracker" should { "shouldn't do anything" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), 
resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) @@ -190,7 +190,7 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup val testType = ObjectType("TestType", fields[Unit, Unit]( Field("testEnum", OptionType(StringType), arguments = Argument("foo", testEnum) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val schema = Schema(testType) @@ -213,8 +213,8 @@ class DeprecationTrackerSpec extends WordSpec with Matchers with FutureResultSup "track deprecated fields" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) diff --git a/src/test/scala/sangria/execution/DirectivesSpec.scala b/src/test/scala/sangria/execution/DirectivesSpec.scala index aaf73dd4..7461cd34 100644 --- a/src/test/scala/sangria/execution/DirectivesSpec.scala +++ b/src/test/scala/sangria/execution/DirectivesSpec.scala @@ -18,7 +18,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { description = Some("Directs the executor to include this fragment definition only when the `if` argument is true."), arguments = IfArg :: Nil, locations = Set(DirectiveLocation.FragmentDefinition), - shouldInclude = ctx ⇒ ctx.arg(IfArg)) + shouldInclude = ctx => ctx.arg(IfArg)) val schema = Schema(ObjectType("TestType", fields[Unit, TestSubject]( Field("a", 
OptionType(StringType), resolve = _.value.a), @@ -36,25 +36,25 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { "Execute: handles directives" when { "works without directives" should { "basic query works" in { - executeTestQuery("{ a, b }") should be (Map("data" → Map("a" → "a", "b" → "b"))) + executeTestQuery("{ a, b }") should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } } "works on scalars" should { "if true includes scalar" in { - executeTestQuery("{ a, b @include(if: true) }") should be (Map("data" → Map("a" → "a", "b" → "b"))) + executeTestQuery("{ a, b @include(if: true) }") should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "if false omits on scalar" in { - executeTestQuery("{ a, b @include(if: false) }") should be (Map("data" → Map("a" → "a"))) + executeTestQuery("{ a, b @include(if: false) }") should be (Map("data" -> Map("a" -> "a"))) } "unless false includes scalar" in { - executeTestQuery("{ a, b @skip(if: false) }") should be (Map("data" → Map("a" → "a", "b" → "b"))) + executeTestQuery("{ a, b @skip(if: false) }") should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless true omits scalar" in { - executeTestQuery("{ a, b @skip(if: true) }") should be (Map("data" → Map("a" → "a"))) + executeTestQuery("{ a, b @skip(if: true) }") should be (Map("data" -> Map("a" -> "a"))) } } @@ -69,7 +69,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "if true includes fragment spread" in { @@ -82,7 +82,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType { b } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless false includes fragment spread" in { @@ -95,7 +95,7 @@ class DirectivesSpec extends WordSpec with Matchers with 
FutureResultSupport { fragment Frag on TestType { b } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless true omits fragment spread" in { @@ -108,7 +108,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } } @@ -122,7 +122,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "if true includes inline fragment" in { @@ -134,7 +134,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless false includes inline fragment" in { @@ -146,7 +146,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless true includes inline fragment" in { @@ -158,7 +158,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } } @@ -172,7 +172,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "if true includes anonymous inline fragment" in { @@ -184,7 +184,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless false includes anonymous inline fragment" in { @@ -196,7 
+196,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "unless true includes anonymous inline fragment" in { @@ -208,7 +208,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { b } } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } } @@ -223,7 +223,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @fragDefInclude(if: false) { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "if true includes fragment" in { @@ -236,7 +236,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @fragDefInclude(if: true) { b } - """) should be (Map("data" → Map("a" → "a", "b" → "b"))) + """) should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "if false omits fragment (unsupported location)" in { @@ -249,7 +249,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @include(if: false) { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "if true omits fragment (unsupported location)" in { @@ -262,7 +262,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @include(if: true) { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "unless false omits fragment (unsupported location)" in { @@ -275,7 +275,7 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @skip(if: false) { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } "unless true omits fragment 
(unsupported location)" in { @@ -288,22 +288,22 @@ class DirectivesSpec extends WordSpec with Matchers with FutureResultSupport { fragment Frag on TestType @skip(if: true) { b } - """) should be (Map("data" → Map("a" → "a"))) + """) should be (Map("data" -> Map("a" -> "a"))) } } "works with skip and include directives" should { "include and no skip" in { - executeTestQuery("{ a, b @include(if: true) @skip(if: false) }") should be (Map("data" → Map("a" → "a", "b" → "b"))) + executeTestQuery("{ a, b @include(if: true) @skip(if: false) }") should be (Map("data" -> Map("a" -> "a", "b" -> "b"))) } "include and skip" in { - executeTestQuery("{ a, b @include(if: true) @skip(if: true) }") should be (Map("data" → Map("a" → "a"))) + executeTestQuery("{ a, b @include(if: true) @skip(if: true) }") should be (Map("data" -> Map("a" -> "a"))) } "no include or skip" in { - executeTestQuery("{ a, b @include(if: false) @skip(if: false) }") should be (Map("data" → Map("a" → "a"))) + executeTestQuery("{ a, b @include(if: false) @skip(if: false) }") should be (Map("data" -> Map("a" -> "a"))) } } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/ExceptionHandlingSpec.scala b/src/test/scala/sangria/execution/ExceptionHandlingSpec.scala index 9fb1e74e..79047bc9 100644 --- a/src/test/scala/sangria/execution/ExceptionHandlingSpec.scala +++ b/src/test/scala/sangria/execution/ExceptionHandlingSpec.scala @@ -18,19 +18,19 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp val errorScalar = ScalarAlias[String, String](StringType, toScalar = identity, - fromScalar = _ ⇒ Left(EmailTypeViolation)) + fromScalar = _ => Left(EmailTypeViolation)) val TestType = ObjectType("Test", fields[Unit, Unit]( Field("success", OptionType(StringType), arguments = Argument("num", OptionInputType(IntType)) :: Nil, - resolve = _ ⇒ "Yay"), + resolve = _ => "Yay"), Field("errorInScalar", OptionType(StringType), arguments = Argument("email", errorScalar) 
:: Nil, - resolve = _ ⇒ "Yay"), - Field("trySuccess", OptionType(StringType), resolve = _ ⇒ Success("try!")), - Field("tryError", OptionType(StringType), resolve = _ ⇒ Failure(new IllegalStateException("try boom!"))), - Field("error", OptionType(StringType), resolve = _ ⇒ throw new IllegalStateException("Boom!")), - Field("futureError", OptionType(StringType), resolve = _ ⇒ Future.failed[String](new IllegalStateException("Boom!"))) + resolve = _ => "Yay"), + Field("trySuccess", OptionType(StringType), resolve = _ => Success("try!")), + Field("tryError", OptionType(StringType), resolve = _ => Failure(new IllegalStateException("try boom!"))), + Field("error", OptionType(StringType), resolve = _ => throw new IllegalStateException("Boom!")), + Field("futureError", OptionType(StringType), resolve = _ => Future.failed[String](new IllegalStateException("Boom!"))) )) val schema = Schema(TestType) @@ -50,25 +50,25 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp Executor.execute(schema, doc).await should be ( Map( - "data" → Map( - "success" → "Yay", - "trySuccess" → "try!", - "tryError" → null, - "error" → null, - "futureError" → null), - "errors" → List( + "data" -> Map( + "success" -> "Yay", + "trySuccess" -> "try!", + "tryError" -> null, + "error" -> null, + "futureError" -> null), + "errors" -> List( Map( - "message" → "Internal server error", - "path" → List("error"), - "locations" → List(Map("line" → 6, "column" → 11))), + "message" -> "Internal server error", + "path" -> List("error"), + "locations" -> List(Map("line" -> 6, "column" -> 11))), Map( - "message" → "Internal server error", - "path" → List("tryError"), - "locations" → List(Map("line" → 4, "column" → 11))), + "message" -> "Internal server error", + "path" -> List("tryError"), + "locations" -> List(Map("line" -> 4, "column" -> 11))), Map( - "message" → "Internal server error", - "path" → List("futureError"), - "locations" → List(Map("line" → 7, "column" → 11)))))) + 
"message" -> "Internal server error", + "path" -> List("futureError"), + "locations" -> List(Map("line" -> 7, "column" -> 11)))))) } out should include ("java.lang.IllegalStateException: Boom!") @@ -83,23 +83,23 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp """) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } Executor.execute(schema, doc, exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map( - "error" → null, - "futureError" → null), - "errors" → List( + "data" -> Map( + "error" -> null, + "futureError" -> null), + "errors" -> List( Map( - "message" → "Boom!", - "path" → List("error"), - "locations" → List(Map("line" → 3, "column" → 11))), + "message" -> "Boom!", + "path" -> List("error"), + "locations" -> List(Map("line" -> 3, "column" -> 11))), Map( - "message" → "Boom!", - "path" → List("futureError"), - "locations" → List(Map("line" → 4, "column" → 11)))))) + "message" -> "Boom!", + "path" -> List("futureError"), + "locations" -> List(Map("line" -> 4, "column" -> 11)))))) } "provide user-defined exception handling mechanism which allows to provide additional fields" in { @@ -111,30 +111,30 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp """) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ - HandledException(e.getMessage, Map("foo" → m.list(m.fromString("bar"), m.fromInt(1234)), "baz" → m.fromString("Test"))) + case (m, e: IllegalStateException) => + HandledException(e.getMessage, Map("foo" -> m.list(m.fromString("bar"), m.fromInt(1234)), "baz" -> m.fromString("Test"))) } Executor.execute(schema, doc, exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map( - "error" → null, - "futureError" → null), - "errors" → List( + "data" -> Map( + "error" -> null, + "futureError" -> null), + 
"errors" -> List( Map( - "message" → "Boom!", - "path" → List("error"), - "locations" → List(Map("line" → 3, "column" → 11)), - "extensions" → Map( - "foo" → List("bar", 1234), - "baz" → "Test")), + "message" -> "Boom!", + "path" -> List("error"), + "locations" -> List(Map("line" -> 3, "column" -> 11)), + "extensions" -> Map( + "foo" -> List("bar", 1234), + "baz" -> "Test")), Map( - "message" → "Boom!", - "path" → List("futureError"), - "locations" → List(Map("line" → 4, "column" → 11)), - "extensions" → Map( - "foo" → List("bar", 1234), - "baz" → "Test"))))) + "message" -> "Boom!", + "path" -> List("futureError"), + "locations" -> List(Map("line" -> 4, "column" -> 11)), + "extensions" -> Map( + "foo" -> List("bar", 1234), + "baz" -> "Test"))))) } "handle violation-based errors" in { @@ -147,36 +147,36 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp """) val exceptionHandler = ExceptionHandler (onViolation = { - case (m, BadValueViolation(_, _, Some(v: EmailTypeViolation.type), _, _)) ⇒ - HandledException("Scalar", Map("original" → m.scalarNode(v.errorMessage, "String", Set.empty))) - case (m, v: UndefinedFieldViolation) ⇒ - HandledException("Field is missing!!! D:", Map("fieldName" → m.scalarNode(v.fieldName, "String", Set.empty))) - case (_, v: AstNodeLocation) ⇒ + case (m, BadValueViolation(_, _, Some(v: EmailTypeViolation.type), _, _)) => + HandledException("Scalar", Map("original" -> m.scalarNode(v.errorMessage, "String", Set.empty))) + case (m, v: UndefinedFieldViolation) => + HandledException("Field is missing!!! 
D:", Map("fieldName" -> m.scalarNode(v.fieldName, "String", Set.empty))) + case (_, v: AstNodeLocation) => HandledException(v.simpleErrorMessage + " [with extras]") }) val res = Executor.execute(schema, doc, exceptionHandler = exceptionHandler).recover { - case analysis: QueryAnalysisError ⇒ analysis.resolveError + case analysis: QueryAnalysisError => analysis.resolveError } res.await should be ( Map( - "data" → null, - "errors" → Vector( + "data" -> null, + "errors" -> Vector( Map( - "message" → "Field is missing!!! D:", - "locations" → Vector(Map("line" → 3, "column" → 11)), - "extensions" → Map( - "fieldName" → "nonExistingField")), + "message" -> "Field is missing!!! D:", + "locations" -> Vector(Map("line" -> 3, "column" -> 11)), + "extensions" -> Map( + "fieldName" -> "nonExistingField")), Map( - "message" → "Expected type 'Int', found '\"One\"'. Int value expected [with extras]", - "locations" → Vector(Map("line" → 4, "column" → 24))), + "message" -> "Expected type 'Int', found '\"One\"'. 
Int value expected [with extras]", + "locations" -> Vector(Map("line" -> 4, "column" -> 24))), Map( - "message" → "Scalar", - "locations" → Vector(Map("line" → 5, "column" → 32)), - "extensions" → Map( - "original" → "Invalid email"))))) + "message" -> "Scalar", + "locations" -> Vector(Map("line" -> 5, "column" -> 32)), + "extensions" -> Map( + "original" -> "Invalid email"))))) } "handle user-facing errors errors" in { @@ -187,23 +187,23 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp """) val exceptionHandler = ExceptionHandler (onUserFacingError = { - case (m, e: OperationSelectionError) ⇒ - HandledException("Wrong operation?!", Map("errorCode" → m.scalarNode("AAAAAaaAA!", "String", Set.empty))) + case (m, e: OperationSelectionError) => + HandledException("Wrong operation?!", Map("errorCode" -> m.scalarNode("AAAAAaaAA!", "String", Set.empty))) }) val res = Executor.execute(schema, doc, operationName = Some("Bar"), exceptionHandler = exceptionHandler).recover { - case analysis: QueryAnalysisError ⇒ analysis.resolveError + case analysis: QueryAnalysisError => analysis.resolveError } res.await should be ( Map( - "data" → null, - "errors" → Vector( + "data" -> null, + "errors" -> Vector( Map( - "message" → "Wrong operation?!", - "extensions" → Map( - "errorCode" → "AAAAAaaAA!"))))) + "message" -> "Wrong operation?!", + "extensions" -> Map( + "errorCode" -> "AAAAAaaAA!"))))) } "allow multiple handled errors with ast positions" in { @@ -214,68 +214,68 @@ class ExceptionHandlingSpec extends WordSpec with Matchers with FutureResultSupp """.stripCR) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ + case (m, e: IllegalStateException) => HandledException.multiple( Vector( - ("Error 1", Map("errorCode" → m.scalarNode("OOPS", "String", Set.empty)), Nil), + ("Error 1", Map("errorCode" -> m.scalarNode("OOPS", "String", Set.empty)), Nil), ("Error 2", Map.empty[String, m.Node], 
doc.operations.head._2.location.toList))) } Executor.execute(schema, doc, exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map("error" → null), - "errors" → Vector( + "data" -> Map("error" -> null), + "errors" -> Vector( Map( - "message" → "Error 1", - "path" → Vector("error"), - "locations" → Vector( - Map("line" → 3, "column" → 11)), - "extensions" → Map( - "errorCode" → "OOPS")), + "message" -> "Error 1", + "path" -> Vector("error"), + "locations" -> Vector( + Map("line" -> 3, "column" -> 11)), + "extensions" -> Map( + "errorCode" -> "OOPS")), Map( - "message" → "Error 2", - "path" → Vector("error"), - "locations" → Vector( - Map("line" → 3, "column" → 11), - Map("line" → 2, "column" → 9)))))) + "message" -> "Error 2", + "path" -> Vector("error"), + "locations" -> Vector( + Map("line" -> 3, "column" -> 11), + Map("line" -> 2, "column" -> 9)))))) } "provide a way to add extension fields in the error itself (backwards compat)" in { val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ - HandledException("Wrong operation?!", Map("errorCode" → m.fromString("Ooops!")), addFieldsInError = true) + case (m, e: IllegalStateException) => + HandledException("Wrong operation?!", Map("errorCode" -> m.fromString("Ooops!")), addFieldsInError = true) } Executor.execute(schema, gql"{error}", exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map( - "error" → null), - "errors" → Vector( + "data" -> Map( + "error" -> null), + "errors" -> Vector( Map( - "message" → "Wrong operation?!", - "path" → Vector("error"), - "locations" → Vector(Map("line" → 1, "column" → 2)), - "errorCode" → "Ooops!", - "extensions" → Map( - "errorCode" → "Ooops!"))))) + "message" -> "Wrong operation?!", + "path" -> Vector("error"), + "locations" -> Vector(Map("line" -> 1, "column" -> 2)), + "errorCode" -> "Ooops!", + "extensions" -> Map( + "errorCode" -> "Ooops!"))))) } "provide a way to remove extension fields (backwards compat)" in { 
val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ - HandledException("Wrong operation?!", Map("errorCode" → m.fromString("Ooops!")), addFieldsInError = true, addFieldsInExtensions = false) + case (m, e: IllegalStateException) => + HandledException("Wrong operation?!", Map("errorCode" -> m.fromString("Ooops!")), addFieldsInError = true, addFieldsInExtensions = false) } Executor.execute(schema, gql"{error}", exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map( - "error" → null), - "errors" → Vector( + "data" -> Map( + "error" -> null), + "errors" -> Vector( Map( - "message" → "Wrong operation?!", - "path" → Vector("error"), - "locations" → Vector(Map("line" → 1, "column" → 2)), - "errorCode" → "Ooops!")))) + "message" -> "Wrong operation?!", + "path" -> Vector("error"), + "locations" -> Vector(Map("line" -> 1, "column" -> 2)), + "errorCode" -> "Ooops!")))) } } } diff --git a/src/test/scala/sangria/execution/ExecutorSchemaSpec.scala b/src/test/scala/sangria/execution/ExecutorSchemaSpec.scala index 0ec7c9f6..86532679 100644 --- a/src/test/scala/sangria/execution/ExecutorSchemaSpec.scala +++ b/src/test/scala/sangria/execution/ExecutorSchemaSpec.scala @@ -34,14 +34,14 @@ class ExecutorSchemaSpec extends WordSpec with Matchers with FutureResultSupport Field("width", OptionType(IntType), resolve = _.value.width), Field("height", OptionType(IntType), resolve = _.value.height))) - val BlogAuthorType = ObjectType("Author", () ⇒ fields[Unit, Author]( + val BlogAuthorType = ObjectType("Author", () => fields[Unit, Author]( Field("id", OptionType(StringType), resolve = _.value.id), Field("name", OptionType(StringType), resolve = _.value.name), Field("pic", OptionType(BlogImageType), arguments = Argument("width", OptionInputType(IntType)) :: Argument("height", OptionInputType(IntType)) :: Nil, - resolve = ctx ⇒ for {w ← ctx.argOpt[Int]("width"); h ← ctx.argOpt[Int]("height"); pic ← ctx.value.pic(w, h)} yield pic), + resolve = 
ctx => for {w <- ctx.argOpt[Int]("width"); h <- ctx.argOpt[Int]("height"); pic <- ctx.value.pic(w, h)} yield pic), Field("recentArticle", OptionType(BlogArticleType), - resolve = ctx ⇒ ctx.value.recentArticle map (ra ⇒ DeferredValue(ArticleDeferred(ra))) getOrElse Value(None)))) + resolve = ctx => ctx.value.recentArticle map (ra => DeferredValue(ArticleDeferred(ra))) getOrElse Value(None)))) val BlogArticleType: ObjectType[Unit, Article] = ObjectType("Article", fields[Unit, Article]( Field("id", StringType, resolve = _.value.id), @@ -54,14 +54,14 @@ class ExecutorSchemaSpec extends WordSpec with Matchers with FutureResultSupport val BlogQueryType = ObjectType("Query", fields[Unit, Unit]( Field("article", OptionType(BlogArticleType), arguments = Argument("id", OptionInputType(IDType)) :: Nil, - resolve = ctx ⇒ ctx.argOpt[String]("id") flatMap (id ⇒ article(id.toInt))), + resolve = ctx => ctx.argOpt[String]("id") flatMap (id => article(id.toInt))), Field("feed", OptionType(ListType(OptionType(BlogArticleType))), - resolve = _ ⇒ (1 to 10).toList.map(article)))) + resolve = _ => (1 to 10).toList.map(article)))) val BlogSubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( Field("articleSubscribe", OptionType(BlogArticleType), arguments = Argument("id", OptionInputType(IDType)) :: Nil, - resolve = ctx ⇒ ctx.argOpt[String]("id") flatMap (id ⇒ article(id.toInt))))) + resolve = ctx => ctx.argOpt[String]("id") flatMap (id => article(id.toInt))))) val BlogSchema = Schema(BlogQueryType, subscription = Some(BlogSubscriptionType)) @@ -112,37 +112,37 @@ class ExecutorSchemaSpec extends WordSpec with Matchers with FutureResultSupport """) val expected = Map( - "data" → Map( - "feed" → List( - Map("id" → "1", "title" → "My Article 1"), - Map("id" → "2", "title" → "My Article 2"), - Map("id" → "3", "title" → "My Article 3"), - Map("id" → "4", "title" → "My Article 4"), - Map("id" → "5", "title" → "My Article 5"), - Map("id" → "6", "title" → "My Article 6"), - Map("id" → 
"7", "title" → "My Article 7"), - Map("id" → "8", "title" → "My Article 8"), - Map("id" → "9", "title" → "My Article 9"), - Map("id" → "10", "title" → "My Article 10")), - "article" → Map( - "id" → "1", - "isPublished" → true, - "title" → "My Article 1", - "body" → "This is a post", - "author" → Map( - "id" → "123", - "name" → "John Smith", - "pic" → Map( - "url" → "cdn://123", - "width" → 640, - "height" → 480 + "data" -> Map( + "feed" -> List( + Map("id" -> "1", "title" -> "My Article 1"), + Map("id" -> "2", "title" -> "My Article 2"), + Map("id" -> "3", "title" -> "My Article 3"), + Map("id" -> "4", "title" -> "My Article 4"), + Map("id" -> "5", "title" -> "My Article 5"), + Map("id" -> "6", "title" -> "My Article 6"), + Map("id" -> "7", "title" -> "My Article 7"), + Map("id" -> "8", "title" -> "My Article 8"), + Map("id" -> "9", "title" -> "My Article 9"), + Map("id" -> "10", "title" -> "My Article 10")), + "article" -> Map( + "id" -> "1", + "isPublished" -> true, + "title" -> "My Article 1", + "body" -> "This is a post", + "author" -> Map( + "id" -> "123", + "name" -> "John Smith", + "pic" -> Map( + "url" -> "cdn://123", + "width" -> 640, + "height" -> 480 ), - "recentArticle" → Map( - "id" → "1", - "isPublished" → true, - "title" → "My Article 1", - "body" → "This is a post", - "keywords" → List("foo", "bar", null, "1") + "recentArticle" -> Map( + "id" -> "1", + "isPublished" -> true, + "title" -> "My Article 1", + "body" -> "This is a post", + "keywords" -> List("foo", "bar", null, "1") ) ) ) @@ -151,7 +151,7 @@ class ExecutorSchemaSpec extends WordSpec with Matchers with FutureResultSupport val resolver = new DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case ArticleDeferred(id) ⇒ Future.successful(article(id.toInt)) + case ArticleDeferred(id) => Future.successful(article(id.toInt)) } } @@ -191,31 +191,31 @@ class ExecutorSchemaSpec extends WordSpec with 
Matchers with FutureResultSupport val resolver = new DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case ArticleDeferred(id) ⇒ Future.successful(article(id.toInt)) + case ArticleDeferred(id) => Future.successful(article(id.toInt)) } } Executor.execute(BlogSchema, query, deferredResolver = resolver, queryValidator = QueryValidator.empty).await should be (Map( - "data" → Map( - "articleSubscribe" → Map( - "id" → "1", - "isPublished" → true, - "title" → "My Article 1", - "body" → "This is a post", - "author" → Map( - "id" → "123", - "name" → "John Smith", - "pic" → Map( - "url" → "cdn://123", - "width" → 640, - "height" → 480 + "data" -> Map( + "articleSubscribe" -> Map( + "id" -> "1", + "isPublished" -> true, + "title" -> "My Article 1", + "body" -> "This is a post", + "author" -> Map( + "id" -> "123", + "name" -> "John Smith", + "pic" -> Map( + "url" -> "cdn://123", + "width" -> 640, + "height" -> 480 ), - "recentArticle" → Map( - "id" → "1", - "isPublished" → true, - "title" → "My Article 1", - "body" → "This is a post", - "keywords" → List("foo", "bar", null, "1") + "recentArticle" -> Map( + "id" -> "1", + "isPublished" -> true, + "title" -> "My Article 1", + "body" -> "This is a post", + "keywords" -> List("foo", "bar", null, "1") ) ) ) @@ -223,4 +223,4 @@ class ExecutorSchemaSpec extends WordSpec with Matchers with FutureResultSupport )) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/ExecutorSpec.scala b/src/test/scala/sangria/execution/ExecutorSpec.scala index 02bb8206..917b64ac 100644 --- a/src/test/scala/sangria/execution/ExecutorSpec.scala +++ b/src/test/scala/sangria/execution/ExecutorSpec.scala @@ -45,19 +45,19 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { class LightColorResolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: 
Any)(implicit ec: ExecutionContext) = deferred map { - case LightColor(v, c) ⇒ Future.successful(v.deepColor("light" + c)) - case FailColor(v, c) ⇒ Future.failed(new IllegalStateException("error in resolver")) + case LightColor(v, c) => Future.successful(v.deepColor("light" + c)) + case FailColor(v, c) => Future.failed(new IllegalStateException("error in resolver")) } } class BrokenLightColorResolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = (deferred ++ deferred) map { - case LightColor(v, c) ⇒ Future.successful(v.deepColor("light" + c)) - case FailColor(v, c) ⇒ Future.failed(new IllegalStateException("error in resolver")) + case LightColor(v, c) => Future.successful(v.deepColor("light" + c)) + case FailColor(v, c) => Future.failed(new IllegalStateException("error in resolver")) } } - val DeepDataType = ObjectType("DeepDataType", () ⇒ fields[Ctx, DeepTestSubject]( + val DeepDataType = ObjectType("DeepDataType", () => fields[Ctx, DeepTestSubject]( Field("a", OptionType(StringType), resolve = _.value.a), Field("b", OptionType(StringType), resolve = _.value.b), Field("c", OptionType(ListType(OptionType(StringType))), resolve = _.value.c), @@ -66,7 +66,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { Field("deeper", OptionType(ListType(OptionType(DataType))), resolve = _.value.deeper) )) - val DataType: ObjectType[Ctx, TestSubject] = ObjectType("DataType", () ⇒ fields[Ctx, TestSubject]( + val DataType: ObjectType[Ctx, TestSubject] = ObjectType("DataType", () => fields[Ctx, TestSubject]( Field("a", OptionType(StringType), resolve = _.value.a), Field("b", OptionType(StringType), resolve = _.value.b), Field("c", OptionType(StringType), resolve = _.value.c), @@ -74,30 +74,30 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { Field("e", OptionType(StringType), resolve = _.value.e), Field("f", OptionType(StringType), 
resolve = _.value.f), Field("ctxUpdating", DeepDataType, resolve = - ctx ⇒ UpdateCtx(ctx.value.deepColor("blue"))(v ⇒ ctx.ctx.copy(color = v.color))), + ctx => UpdateCtx(ctx.value.deepColor("blue"))(v => ctx.ctx.copy(color = v.color))), Field("ctxUpdatingFut", DeepDataType, resolve = - ctx ⇒ UpdateCtx(Future.successful(ctx.value.deepColor("orange")))(v ⇒ ctx.ctx.copy(color = v.color))), + ctx => UpdateCtx(Future.successful(ctx.value.deepColor("orange")))(v => ctx.ctx.copy(color = v.color))), Field("ctxUpdatingDef", DeepDataType, resolve = - ctx ⇒ UpdateCtx(LightColor(ctx.value, "magenta"))(v ⇒ ctx.ctx.copy(color = v.color))), + ctx => UpdateCtx(LightColor(ctx.value, "magenta"))(v => ctx.ctx.copy(color = v.color))), Field("ctxUpdatingDefFut", DeepDataType, resolve = - ctx ⇒ UpdateCtx(DeferredFutureValue(Future.successful(LightColor(ctx.value, "red"))))(v ⇒ ctx.ctx.copy(color = v.color))), - Field("def", DeepDataType, resolve = ctx ⇒ LightColor(ctx.value, "magenta")), - Field("defFut", DeepDataType, resolve = ctx ⇒ DeferredFutureValue(Future.successful(LightColor(ctx.value, "red")))), - Field("defFail", OptionType(DeepDataType), resolve = ctx ⇒ FailColor(ctx.value, "magenta")), - Field("defFutFail", OptionType(DeepDataType), resolve = ctx ⇒ DeferredFutureValue(Future.successful(FailColor(ctx.value, "red")))), + ctx => UpdateCtx(DeferredFutureValue(Future.successful(LightColor(ctx.value, "red"))))(v => ctx.ctx.copy(color = v.color))), + Field("def", DeepDataType, resolve = ctx => LightColor(ctx.value, "magenta")), + Field("defFut", DeepDataType, resolve = ctx => DeferredFutureValue(Future.successful(LightColor(ctx.value, "red")))), + Field("defFail", OptionType(DeepDataType), resolve = ctx => FailColor(ctx.value, "magenta")), + Field("defFutFail", OptionType(DeepDataType), resolve = ctx => DeferredFutureValue(Future.successful(FailColor(ctx.value, "red")))), Field("pic", OptionType(StringType), arguments = Argument("size", OptionInputType(IntType)) :: Nil, - resolve = 
ctx ⇒ ctx.value.pic(ctx.argOpt[Int]("size"))), + resolve = ctx => ctx.value.pic(ctx.argOpt[Int]("size"))), Field("deep", OptionType(DeepDataType), resolve = _.value.deep), Field("future", OptionType(DataType), resolve = _.value.future) )) - val ParallelFragmentType: ObjectType[Unit, Unit] = ObjectType("Type", () ⇒ fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "Apple"), - Field("b", OptionType(StringType), resolve = _ ⇒ "Banana"), - Field("c", OptionType(StringType), resolve = _ ⇒ "Cherry"), - Field("d", StringType, resolve = _ ⇒ "Door"), - Field("deep", OptionType(ParallelFragmentType), resolve = _ ⇒ ()) + val ParallelFragmentType: ObjectType[Unit, Unit] = ObjectType("Type", () => fields[Unit, Unit]( + Field("a", OptionType(StringType), resolve = _ => "Apple"), + Field("b", OptionType(StringType), resolve = _ => "Banana"), + Field("c", OptionType(StringType), resolve = _ => "Cherry"), + Field("d", StringType, resolve = _ => "Door"), + Field("deep", OptionType(ParallelFragmentType), resolve = _ => ()) )) "Execute: Handles basic execution tasks" should { @@ -133,23 +133,23 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """ val expected = Map( - "data" → Map( - "a" → "Apple", - "b" → "Banana", - "x" → "Cookie", - "d" → "Donut", - "e" → "Egg", - "f" → "Fish", - "pic" → "Pic of size: 100", - "future" → Map("a" → "Apple"), - "deep" → Map( - "a" → "Already Been Done", - "b" → "Boring", - "c" → List("Contrived", null, "Confusing"), - "deeper" → List( - Map("a" → "Apple", "b" → "Banana"), + "data" -> Map( + "a" -> "Apple", + "b" -> "Banana", + "x" -> "Cookie", + "d" -> "Donut", + "e" -> "Egg", + "f" -> "Fish", + "pic" -> "Pic of size: 100", + "future" -> Map("a" -> "Apple"), + "deep" -> Map( + "a" -> "Already Been Done", + "b" -> "Boring", + "c" -> List("Contrived", null, "Confusing"), + "deeper" -> List( + Map("a" -> "Apple", "b" -> "Banana"), null, - Map("a" → "Apple", "b" → "Banana") + Map("a" -> "Apple", "b" -> 
"Banana") ) ) ) @@ -157,7 +157,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) - Executor.execute(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" → 100))).await should be (expected) + Executor.execute(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" -> 100))).await should be (expected) } "prepare and execute arbitrary queries" in { @@ -192,23 +192,23 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """ val expected = Map( - "data" → Map( - "a" → "Apple", - "b" → "Banana", - "x" → "Cookie", - "d" → "Donut", - "e" → "Egg", - "f" → "Fish", - "pic" → "Pic of size: 100", - "future" → Map("a" → "Apple"), - "deep" → Map( - "a" → "Already Been Done", - "b" → "Boring", - "c" → List("Contrived", null, "Confusing"), - "deeper" → List( - Map("a" → "Apple", "b" → "Banana"), + "data" -> Map( + "a" -> "Apple", + "b" -> "Banana", + "x" -> "Cookie", + "d" -> "Donut", + "e" -> "Egg", + "f" -> "Fish", + "pic" -> "Pic of size: 100", + "future" -> Map("a" -> "Apple"), + "deep" -> Map( + "a" -> "Already Been Done", + "b" -> "Boring", + "c" -> List("Contrived", null, "Confusing"), + "deeper" -> List( + Map("a" -> "Apple", "b" -> "Banana"), null, - Map("a" → "Apple", "b" → "Banana") + Map("a" -> "Apple", "b" -> "Banana") ) ) ) @@ -216,7 +216,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) - val preparedQuery = Executor.prepare(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" → 100))).await + val preparedQuery = Executor.prepare(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" -> 100))).await preparedQuery.execute().await should be (expected) } @@ -230,7 +230,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { } """ - val expected = Map("data" → Map("pic" → "Pic of size: 100")) + val expected = Map("data" -> Map("pic" -> "Pic of 
size: 100")) val schema = Schema(DataType) @@ -261,7 +261,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { def reduceCtx(acc: Acc, ctx: Ctx): ReduceAction[Ctx, Ctx] = Value(ctx) } - Executor.execute(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" → 100)), queryReducers = PicSizeFinderReducer :: Nil ).await should be (expected) + Executor.execute(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" -> 100)), queryReducers = PicSizeFinderReducer :: Nil ).await should be (expected) sizeValue should be (100) } @@ -277,7 +277,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) an [ValidationError] should be thrownBy - Executor.prepare(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" → 100))).await + Executor.prepare(schema, doc, Ctx(), new TestSubject, variables = mapVars(Map("size" -> 100))).await } "prepare should execute query reducers in the preparation stage" in { @@ -291,8 +291,8 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) - val introQR = QueryReducer.hasIntrospection[Ctx]((hasIntro, ctx) ⇒ ctx.copy(color = if (hasIntro) "red" else "blue")) - val failQR = QueryReducer.hasIntrospection[Ctx]((hasIntro, ctx) ⇒ if (hasIntro) throw new IllegalStateException("foo") else ctx) + val introQR = QueryReducer.hasIntrospection[Ctx]((hasIntro, ctx) => ctx.copy(color = if (hasIntro) "red" else "blue")) + val failQR = QueryReducer.hasIntrospection[Ctx]((hasIntro, ctx) => if (hasIntro) throw new IllegalStateException("foo") else ctx) an [QueryReducingError] should be thrownBy Executor.prepare(schema, doc, Ctx(), new TestSubject, queryReducers = failQR :: Nil).await @@ -340,49 +340,49 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """) val expected = Map( - "data" → Map( - "d1" → Map( - "deep" → Map( - "deep" → Map( - "deep" → Map( - "deep" 
→ Map( - "a" → "Apple", - "deep" → Map( - "b" → null + "data" -> Map( + "d1" -> Map( + "deep" -> Map( + "deep" -> Map( + "deep" -> Map( + "deep" -> Map( + "a" -> "Apple", + "deep" -> Map( + "b" -> null ) ) ) ) ) ), - "d2" → Map( - "deep" → Map( - "deep" → Map( - "deep" → Map( - "deep" → Map( - "a" → "Apple", - "deep" → null + "d2" -> Map( + "deep" -> Map( + "deep" -> Map( + "deep" -> Map( + "deep" -> Map( + "a" -> "Apple", + "deep" -> null ) ) ) ) ) ), - "errors" → List( + "errors" -> List( Map( - "message" → "Max query depth 6 is reached.", - "path" → List("d1", "deep", "deep", "deep", "deep", "deep", "b"), - "locations" → List(Map("line" → 10, "column" → 23)) + "message" -> "Max query depth 6 is reached.", + "path" -> List("d1", "deep", "deep", "deep", "deep", "deep", "b"), + "locations" -> List(Map("line" -> 10, "column" -> 23)) ), Map( - "message" → "Max query depth 6 is reached.", - "path" → List("d2", "deep", "deep", "deep", "deep", "deep", "b"), - "locations" → List(Map("line" → 24, "column" → 23)) + "message" -> "Max query depth 6 is reached.", + "path" -> List("d2", "deep", "deep", "deep", "deep", "deep", "b"), + "locations" -> List(Map("line" -> 24, "column" -> 23)) ), Map( - "message" → "Max query depth 6 is reached.", - "path" → List("d2", "deep", "deep", "deep", "deep", "deep", "d"), - "locations" → List(Map("line" → 25, "column" → 23)) + "message" -> "Max query depth 6 is reached.", + "path" -> List("d2", "deep", "deep", "deep", "deep", "deep", "d"), + "locations" -> List(Map("line" -> 25, "column" -> 23)) ) ) ) @@ -410,16 +410,16 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """) val expected = Map( - "data" → Map( - "a" → "Apple", - "b" → "Banana", - "c" → "Cherry", - "deep" → Map( - "b" → "Banana", - "c" → "Cherry", - "deeper" → Map( - "b" → "Banana", - "c" → "Cherry"))) + "data" -> Map( + "a" -> "Apple", + "b" -> "Banana", + "c" -> "Cherry", + "deep" -> Map( + "b" -> "Banana", + "c" -> "Cherry", + "deeper" -> Map( 
+ "b" -> "Banana", + "c" -> "Cherry"))) ) Executor.execute(schema, doc).await should be (expected) @@ -431,11 +431,11 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { var resolvedCtx: Option[String] = None val schema = Schema(ObjectType("Type", fields[Unit, Thing]( - Field("a", OptionType(StringType), resolve = ctx ⇒ {resolvedCtx = ctx.value.a; ctx.value.a})))) + Field("a", OptionType(StringType), resolve = ctx => {resolvedCtx = ctx.value.a; ctx.value.a})))) val Success(doc) = QueryParser.parse("query Example { a }") - Executor.execute(schema, doc, root = Thing(Some("thing"))).await should be (Map("data" → Map("a" → "thing"))) + Executor.execute(schema, doc, root = Thing(Some("thing"))).await should be (Map("data" -> Map("a" -> "thing"))) resolvedCtx should be (Some("thing")) } @@ -446,7 +446,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( Field("b", OptionType(StringType), arguments = Argument("numArg", OptionInputType(IntType)) :: Argument("stringArg", OptionInputType(StringType)) :: Nil, - resolve = ctx ⇒ {resolvedArgs = ctx.args.raw; None})))) + resolve = ctx => {resolvedArgs = ctx.args.raw; None})))) val Success(doc) = QueryParser.parse(""" query Example { @@ -455,7 +455,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """) Executor.execute(schema, doc).await - resolvedArgs should be (Map("numArg" → Some(123), "stringArg" → Some("foo"))) + resolvedArgs should be (Map("numArg" -> Some(123), "stringArg" -> Some("foo"))) } "null out error subtrees" in { @@ -473,12 +473,12 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { Field("sync", OptionType(StringType), resolve = _.value.sync), Field("syncError", OptionType(StringType), resolve = _.value.syncError), Field("async", OptionType(StringType), resolve = _.value.async), - Field("asyncReject", OptionType(StringType), resolve = ctx ⇒ 
ctx.value.asyncReject), + Field("asyncReject", OptionType(StringType), resolve = ctx => ctx.value.asyncReject), Field("asyncError", OptionType(StringType), resolve = _.value.asyncError), Field("syncDeferError", OptionType(StringType), - resolve = ctx ⇒ DeferredValue(throw new IllegalStateException("Error getting syncDeferError"))), + resolve = ctx => DeferredValue(throw new IllegalStateException("Error getting syncDeferError"))), Field("asyncDeferError", OptionType(StringType), - resolve = _ ⇒ DeferredFutureValue(Future.failed(throw new IllegalStateException("Error getting asyncDeferError")))) + resolve = _ => DeferredFutureValue(Future.failed(throw new IllegalStateException("Error getting asyncDeferError")))) ))) val Success(doc) = QueryParser.parse(""" @@ -493,7 +493,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { }""") val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val result = Executor.execute(schema, doc, root = new Data, exceptionHandler = exceptionHandler).await.asInstanceOf[Map[String, Any]] @@ -502,57 +502,57 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val errors = result("errors").asInstanceOf[Seq[_]] data should be (Map( - "sync" → "sync", - "syncError" → null, - "async" → "async", - "asyncReject" → null, - "asyncError" → null, - "asyncDeferError" → null, - "syncDeferError" → null + "sync" -> "sync", + "syncError" -> null, + "async" -> "async", + "asyncReject" -> null, + "asyncError" -> null, + "asyncDeferError" -> null, + "syncDeferError" -> null )) errors should (have(size(5)) and contain(Map( - "path" → List("syncError"), - "locations" → List(Map("line" → 4, "column" → 14)), - "message" → "Error getting syncError")) and + "path" -> List("syncError"), + "locations" -> List(Map("line" -> 4, "column" -> 14)), + "message" -> "Error getting 
syncError")) and contain(Map( - "path" → List("asyncReject"), - "locations" → List(Map("line" → 6, "column" → 11)), - "message" → "Error getting asyncReject")) and + "path" -> List("asyncReject"), + "locations" -> List(Map("line" -> 6, "column" -> 11)), + "message" -> "Error getting asyncReject")) and contain(Map( - "message" → "Error getting asyncDeferError", - "path" → List("asyncDeferError"), - "locations" → List(Map("line" → 7, "column" → 12)))) and + "message" -> "Error getting asyncDeferError", + "path" -> List("asyncDeferError"), + "locations" -> List(Map("line" -> 7, "column" -> 12)))) and contain(Map( - "message" → "Error getting syncDeferError", - "path" → List("syncDeferError"), - "locations" → List(Map("line" → 9, "column" → 15)))) and + "message" -> "Error getting syncDeferError", + "path" -> List("syncDeferError"), + "locations" -> List(Map("line" -> 9, "column" -> 15)))) and contain(Map( - "path" → List("asyncError"), - "locations" → List(Map("line" → 8, "column" → 15)), - "message" → "Error getting asyncError"))) + "path" -> List("asyncError"), + "locations" -> List(Map("line" -> 8, "column" -> 15)), + "message" -> "Error getting asyncError"))) } "use the inline operation if no operation is provided" in { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "b")))) + Field("a", OptionType(StringType), resolve = _ => "b")))) val Success(doc) = QueryParser.parse("{ a }") - Executor.execute(schema, doc).await should be (Map("data" → Map("a" → "b"))) + Executor.execute(schema, doc).await should be (Map("data" -> Map("a" -> "b"))) } "use the only operation if no operation is provided" in { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "b")))) + Field("a", OptionType(StringType), resolve = _ => "b")))) val Success(doc) = QueryParser.parse("query Example { a }") - Executor.execute(schema, doc).await should be (Map("data" → Map("a" → 
"b"))) + Executor.execute(schema, doc).await should be (Map("data" -> Map("a" -> "b"))) } "throw if no operation is provided with multiple operations" in { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "b")))) + Field("a", OptionType(StringType), resolve = _ => "b")))) val Success(doc) = QueryParser.parse("query Example { a } query OtherExample { a }") val error = intercept [OperationSelectionError] (Executor.execute(schema, doc).await) @@ -562,7 +562,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { "throw if the operation name is invalid" in { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "b")))) + Field("a", OptionType(StringType), resolve = _ => "b")))) val Success(doc) = QueryParser.parse("query Example { a }") val error = intercept [OperationSelectionError] (Executor.execute(schema, doc, operationName = Some("Eggsample")).await) @@ -572,15 +572,15 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { "use correct schema type schema for operation" in { val schema = Schema( - ObjectType("Q", fields[Unit, Unit](Field("a", OptionType(StringType), resolve = _ ⇒ "b"))), - Some(ObjectType("M", fields[Unit, Unit](Field("c", OptionType(StringType), resolve = _ ⇒ "d")))), - Some(ObjectType("S", fields[Unit, Unit](Field("e", OptionType(StringType), resolve = _ ⇒ "f"))))) + ObjectType("Q", fields[Unit, Unit](Field("a", OptionType(StringType), resolve = _ => "b"))), + Some(ObjectType("M", fields[Unit, Unit](Field("c", OptionType(StringType), resolve = _ => "d")))), + Some(ObjectType("S", fields[Unit, Unit](Field("e", OptionType(StringType), resolve = _ => "f"))))) val Success(doc) = QueryParser.parse("query Q { a } mutation M { c } subscription S { e }") - Executor.execute(schema, doc, operationName = Some("Q")).await should be (Map("data" → Map("a" → "b"))) - Executor.execute(schema, doc, 
operationName = Some("M")).await should be (Map("data" → Map("c" → "d"))) - Executor.execute(schema, doc, operationName = Some("S")).await should be (Map("data" → Map("e" → "f"))) + Executor.execute(schema, doc, operationName = Some("Q")).await should be (Map("data" -> Map("a" -> "b"))) + Executor.execute(schema, doc, operationName = Some("M")).await should be (Map("data" -> Map("c" -> "d"))) + Executor.execute(schema, doc, operationName = Some("S")).await should be (Map("data" -> Map("e" -> "f"))) } "correct field ordering despite execution order" in { @@ -588,16 +588,16 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { class MyResolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case Sum(a, b) ⇒ Future(a + b)(ec) + case Sum(a, b) => Future(a + b)(ec) } } val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("c", OptionType(StringType), resolve = _ ⇒ "c"), - Field("a", OptionType(StringType), resolve = _ ⇒ Future {Thread.sleep(30); "a"}), - Field("d", OptionType(StringType), resolve = _ ⇒ Future {Thread.sleep(5); "d"}), - Field("b", OptionType(IntType), resolve = _ ⇒ Sum(1, 2)), - Field("e", OptionType(StringType), resolve = _ ⇒ "e")))) + Field("c", OptionType(StringType), resolve = _ => "c"), + Field("a", OptionType(StringType), resolve = _ => Future {Thread.sleep(30); "a"}), + Field("d", OptionType(StringType), resolve = _ => Future {Thread.sleep(5); "d"}), + Field("b", OptionType(IntType), resolve = _ => Sum(1, 2)), + Field("e", OptionType(StringType), resolve = _ => "e")))) def keys(res: Any) = res.asInstanceOf[Map[String, Any]]("data").asInstanceOf[Map[String, Any]].keys.toList @@ -617,15 +617,15 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { class MyResolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: 
ExecutionContext) = deferred map { - case Sum(a, b) ⇒ Future(a + b)(ec) + case Sum(a, b) => Future(a + b)(ec) } } val schema = Schema(ObjectType("Query", fields[Unit, Unit]( - Field("qux", OptionType(StringType), resolve = _ ⇒ "c"), - Field("bar", OptionType(StringType), resolve = _ ⇒ Future {Thread.sleep(30); "a"}), - Field("foo", OptionType(StringType), resolve = _ ⇒ Future {Thread.sleep(5); "d"}), - Field("baz", OptionType(IntType), resolve = _ ⇒ Sum(1, 2))))) + Field("qux", OptionType(StringType), resolve = _ => "c"), + Field("bar", OptionType(StringType), resolve = _ => Future {Thread.sleep(30); "a"}), + Field("foo", OptionType(StringType), resolve = _ => Future {Thread.sleep(5); "d"}), + Field("baz", OptionType(IntType), resolve = _ => Sum(1, 2))))) import sangria.marshalling.queryAst._ import sangria.ast @@ -684,7 +684,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { "avoid recursion" in { val schema = Schema(ObjectType("Type", fields[Unit, Unit]( - Field("a", OptionType(StringType), resolve = _ ⇒ "b")))) + Field("a", OptionType(StringType), resolve = _ => "b")))) val Success(doc) = QueryParser.parse(""" query Q { @@ -700,16 +700,16 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """) Executor.execute(schema, doc, operationName = Some("Q"), queryValidator = QueryValidator.empty).await should be ( - Map("data" → Map("a" → "b"))) + Map("data" -> Map("a" -> "b"))) } "not include illegal fields in output" in { val schema = Schema( - ObjectType("Q", fields[Unit, Unit](Field("a", OptionType(StringType), resolve = _ ⇒ "b"))), - Some(ObjectType("M", fields[Unit, Unit](Field("c", OptionType(StringType), resolve = _ ⇒ "d"))))) + ObjectType("Q", fields[Unit, Unit](Field("a", OptionType(StringType), resolve = _ => "b"))), + Some(ObjectType("M", fields[Unit, Unit](Field("c", OptionType(StringType), resolve = _ => "d"))))) val Success(doc) = QueryParser.parse("mutation M { thisIsIllegalDontIncludeMe }") - 
Executor.execute(schema, doc, queryValidator = QueryValidator.empty).await should be (Map("data" → Map())) + Executor.execute(schema, doc, queryValidator = QueryValidator.empty).await should be (Map("data" -> Map())) } "update context in query operations" in { @@ -737,11 +737,11 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { Executor.execute(schema, doc, Ctx(), new TestSubject, deferredResolver = new LightColorResolver).await should be ( Map( - "data" → Map( - "ctxUpdating" → Map("ctxColor" → "blue"), - "ctxUpdatingFut" → Map("ctxColor" → "orange"), - "ctxUpdatingDef" → Map("ctxColor" → "lightmagenta"), - "ctxUpdatingDefFut" → Map("ctxColor" → "lightred")))) + "data" -> Map( + "ctxUpdating" -> Map("ctxColor" -> "blue"), + "ctxUpdatingFut" -> Map("ctxColor" -> "orange"), + "ctxUpdatingDef" -> Map("ctxColor" -> "lightmagenta"), + "ctxUpdatingDefFut" -> Map("ctxColor" -> "lightred")))) } "resolve deferred values correctly" in { @@ -756,9 +756,9 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { Executor.execute(schema, query, root = new TestSubject, userContext = Ctx(), deferredResolver = new LightColorResolver).await should be ( Map( - "data" → Map( - "def" → Map("color" → "lightmagenta"), - "defFut" → Map("color" → "lightred")))) + "data" -> Map( + "def" -> Map("color" -> "lightmagenta"), + "defFut" -> Map("color" -> "lightred")))) } "ensure that deferred resolver complied to the contract" in { @@ -771,7 +771,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val res = Executor.execute(schema, query, root = new TestSubject, userContext = Ctx(), deferredResolver = new BrokenLightColorResolver, exceptionHandler = exceptionHandler).await.asInstanceOf[Map[String, 
Any]] @@ -792,7 +792,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(DataType) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } Executor.execute(schema, doc, @@ -801,18 +801,18 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { deferredResolver = new LightColorResolver, exceptionHandler = exceptionHandler).await should be ( Map( - "data" → Map( - "defFail" → null, - "defFutFail" → null), - "errors" → List( + "data" -> Map( + "defFail" -> null, + "defFutFail" -> null), + "errors" -> List( Map( - "message" → "error in resolver", - "path" → List("defFail"), - "locations" → List(Map("line" → 3, "column" → 11))), + "message" -> "error in resolver", + "path" -> List("defFail"), + "locations" -> List(Map("line" -> 3, "column" -> 11))), Map( - "message" → "error in resolver", - "path" → List("defFutFail"), - "locations" → List(Map("line" → 4, "column" → 11)))))) + "message" -> "error in resolver", + "path" -> List("defFutFail"), + "locations" -> List(Map("line" -> 4, "column" -> 11)))))) } "fails to execute a query containing a type definition" in { @@ -839,32 +839,32 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val QueryType = ObjectType("Query", fields[Unit, Unit]( Field("eager", ListType(StringType), resolve = - _ ⇒ PartialValue(List("a", "c"), Vector(MyListError("error 1"), MyListError("error 2")))), + _ => PartialValue(List("a", "c"), Vector(MyListError("error 1"), MyListError("error 2")))), Field("future", ListType(StringType), resolve = - _ ⇒ PartialFutureValue( + _ => PartialFutureValue( Future.successful( PartialValue[Unit, List[String]](List("d", "f"), Vector(MyListError("error 3"), MyListError("error 4")))))))) val schema = Schema(QueryType) val exceptionHandler = ExceptionHandler { - case (m, e: MyListError) ⇒ 
HandledException(e.getMessage) + case (m, e: MyListError) => HandledException(e.getMessage) } val result = Executor.execute(schema, doc, exceptionHandler = exceptionHandler).await.asInstanceOf[Map[String, Any]] result("data") should be (Map( - "eager" → Vector("a", "c"), - "future" → Vector("d", "f"))) + "eager" -> Vector("a", "c"), + "future" -> Vector("d", "f"))) val errors = result("errors").asInstanceOf[Seq[Any]] errors should ( have(size(4)) and - contain(Map("message" → "error 1", "path" → List("eager"), "locations" → Vector(Map("line" → 1, "column" → 2)))) and - contain(Map("message" → "error 2", "path" → List("eager"), "locations" → Vector(Map("line" → 1, "column" → 2)))) and - contain(Map("message" → "error 3", "path" → List("future"), "locations" → Vector(Map("line" → 1, "column" → 9)))) and - contain(Map("message" → "error 4", "path" → List("future"), "locations" → Vector(Map("line" → 1, "column" → 9))))) + contain(Map("message" -> "error 1", "path" -> List("eager"), "locations" -> Vector(Map("line" -> 1, "column" -> 2)))) and + contain(Map("message" -> "error 2", "path" -> List("eager"), "locations" -> Vector(Map("line" -> 1, "column" -> 2)))) and + contain(Map("message" -> "error 3", "path" -> List("future"), "locations" -> Vector(Map("line" -> 1, "column" -> 9)))) and + contain(Map("message" -> "error 4", "path" -> List("future"), "locations" -> Vector(Map("line" -> 1, "column" -> 9))))) } "support extended result in queries" in { @@ -876,17 +876,17 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val QueryType = ObjectType("Query", fields[MyCtx, Unit]( Field("hello", StringType, - complexity = Some((_, _, _) ⇒ 123), - resolve = _ ⇒ "world"), + complexity = Some((_, _, _) => 123), + resolve = _ => "world"), Field("error", OptionType(StringType), - resolve = _ ⇒ throw new IllegalStateException("foo")))) + resolve = _ => throw new IllegalStateException("foo")))) val schema = Schema(QueryType) - val reducer = 
QueryReducer.measureComplexity[MyCtx]((c, ctx) ⇒ ctx.copy(complexity = c)) + val reducer = QueryReducer.measureComplexity[MyCtx]((c, ctx) => ctx.copy(complexity = c)) val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val middleware = new Middleware[MyCtx] { @@ -903,7 +903,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { queryReducers = reducer :: Nil).await result.result.asInstanceOf[Map[String, Any]]("data") should be ( - Map("h1" → "world", "h2" → "world", "error" → null)) + Map("h1" -> "world", "h2" -> "world", "error" -> null)) result.errors should have size 1 result.errors(0).error.getMessage should be ("foo") @@ -920,12 +920,12 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { case class MyListError(message: String) extends Exception(message) val QueryType = ObjectType("Query", fields[MyCtx, Unit]( - Field("hello", StringType, resolve = _ ⇒ "world"))) + Field("hello", StringType, resolve = _ => "world"))) val MutationType = ObjectType("Mutation", fields[MyCtx, Unit]( Field("add", StringType, arguments = Argument("str", StringType) :: Nil, - resolve = c ⇒ UpdateCtx(Future(c.ctx.acc + c.arg[String]("str")))(v ⇒ c.ctx.copy(v))))) + resolve = c => UpdateCtx(Future(c.ctx.acc + c.arg[String]("str")))(v => c.ctx.copy(v))))) val schema = Schema(QueryType, Some(MutationType)) @@ -943,7 +943,7 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { val result = Executor.execute(schema, query, ctx).await result.result.asInstanceOf[Map[String, Any]]("data") should be ( - Map("a1" → "One", "a2" → "OneTwo", "a3" → "OneTwoThree")) + Map("a1" -> "One", "a2" -> "OneTwo", "a3" -> "OneTwoThree")) result.errors should have size 0 @@ -973,8 +973,8 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """, expectedData = null, 
expectedErrorStrings = List( - "Cannot spread fragment 'c' within itself." → List(Pos(15, 13)), - "Field 'pic' conflict because they have differing arguments." → List(Pos(11, 13), Pos(7, 13)))) + "Cannot spread fragment 'c' within itself." -> List(Pos(15, 13)), + "Field 'pic' conflict because they have differing arguments." -> List(Pos(11, 13), Pos(7, 13)))) "validate mutually recursive fragments" in checkContainsErrors(schema = Schema(DataType), data = (), userContext = Ctx(), query = @@ -1003,17 +1003,17 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { """, expectedData = null, expectedErrorStrings = List( - "Cannot spread fragment 'c' within itself via 'd'." → List(Pos(15, 13), Pos(19, 13)), - "Field 'pic' conflict because they have differing arguments." → List(Pos(11, 13), Pos(7, 13)))) + "Cannot spread fragment 'c' within itself via 'd'." -> List(Pos(15, 13), Pos(19, 13)), + "Field 'pic' conflict because they have differing arguments." -> List(Pos(11, 13), Pos(7, 13)))) "support `Action.sequence` in queries and mutations" in { val error = new IllegalStateException("foo") - val fetcher = Fetcher[Unit, Int, Int]((_, ids) ⇒ Future.successful(ids.map(_ + 100)))(HasId(_ - 100)) + val fetcher = Fetcher[Unit, Int, Int]((_, ids) => Future.successful(ids.map(_ + 100)))(HasId(_ - 100)) lazy val QueryType = ObjectType("Query", fields[Unit, Unit]( Field("ids", OptionType(ListType(OptionType(IntType))), - resolve = c ⇒ Action.sequence(Seq( + resolve = c => Action.sequence(Seq( LeafAction(Some(1)), LeafAction(None), LeafAction(Some(2)), @@ -1025,13 +1025,13 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { LeafAction(fetcher.deferOpt(8)), LeafAction(Future(fetcher.deferOpt(9))), LeafAction(Future(fetcher.deferOpt(10))) - ).map(_.map(_.map(_ + 10)))).map(vs ⇒ vs.map(_.map(_ + 1)))) + ).map(_.map(_.map(_ + 10)))).map(vs => vs.map(_.map(_ + 1)))) )) val schema = Schema(QueryType, Some(QueryType)) val exceptionHandler 
= ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val queryRes = Executor.execute(schema, graphql"query {ids, foo: ids}", @@ -1042,29 +1042,29 @@ class ExecutorSpec extends WordSpec with Matchers with FutureResultSupport { exceptionHandler = exceptionHandler, deferredResolver = DeferredResolver.fetchers(fetcher)) - Seq(queryRes.await → 8, mutationRes.await → 11) foreach { case (result, offset) ⇒ + Seq(queryRes.await -> 8, mutationRes.await -> 11) foreach { case (result, offset) => result should be ( Map( - "data" → Map( - "ids" → Vector(12, null, 13, 14, 15, 16, 17, 118, 119, 120, 121), - "foo" → Vector(12, null, 13, 14, 15, 16, 17, 118, 119, 120, 121)), - "errors" → Vector( + "data" -> Map( + "ids" -> Vector(12, null, 13, 14, 15, 16, 17, 118, 119, 120, 121), + "foo" -> Vector(12, null, 13, 14, 15, 16, 17, 118, 119, 120, 121)), + "errors" -> Vector( Map( - "message" → "foo", - "path" → Vector("ids"), - "locations" → Vector(Map("line" → 1, "column" → offset))), + "message" -> "foo", + "path" -> Vector("ids"), + "locations" -> Vector(Map("line" -> 1, "column" -> offset))), Map( - "message" → "foo", - "path" → Vector("ids"), - "locations" → Vector(Map("line" → 1, "column" → offset))), + "message" -> "foo", + "path" -> Vector("ids"), + "locations" -> Vector(Map("line" -> 1, "column" -> offset))), Map( - "message" → "foo", - "path" → Vector("foo"), - "locations" → Vector(Map("line" → 1, "column" → (5 + offset)))), + "message" -> "foo", + "path" -> Vector("foo"), + "locations" -> Vector(Map("line" -> 1, "column" -> (5 + offset)))), Map( - "message" → "foo", - "path" → Vector("foo"), - "locations" → Vector(Map("line" → 1, "column" → (5 + offset))))))) + "message" -> "foo", + "path" -> Vector("foo"), + "locations" -> Vector(Map("line" -> 1, "column" -> (5 + offset))))))) } } } diff --git a/src/test/scala/sangria/execution/InputDocumentMaterializerSpec.scala 
b/src/test/scala/sangria/execution/InputDocumentMaterializerSpec.scala index 57eafe55..7be2053f 100644 --- a/src/test/scala/sangria/execution/InputDocumentMaterializerSpec.scala +++ b/src/test/scala/sangria/execution/InputDocumentMaterializerSpec.scala @@ -49,9 +49,9 @@ class InputDocumentMaterializerSpec extends WordSpec with Matchers with StringMa Some(ScalarType[Any]( name = typeName(definition), description = typeDescription(definition), - coerceUserInput = v ⇒ Right(v), - coerceOutput = (v, _) ⇒ v, - coerceInput = v ⇒ Right(v), + coerceUserInput = v => Right(v), + coerceOutput = (v, _) => v, + coerceInput = v => Right(v), complexity = scalarComplexity(definition), scalarInfo = scalarValueInfo(definition), astDirectives = definition.directives)) @@ -101,12 +101,12 @@ class InputDocumentMaterializerSpec extends WordSpec with Matchers with StringMa val errors = QueryValidator.default.validateInputDocument(schema, inp, "Config") assertViolations(errors, - "Expected type 'Int', found '\"foo\"'. Int value expected" → Seq(Pos(4, 18)), - "Expected type 'Color!', found 'FOO_BAR'. Enum value 'FOO_BAR' is undefined in enum type 'Color'. Known values are: RED, GREEN, BLUE." → Seq(Pos(7, 21)), - "Field 'Foo.baz' of required type 'Color!' was not provided." → Seq(Pos(8, 15)), - "Field 'test' is not defined by type 'Foo'." → Seq(Pos(8, 16)), - "Field 'Foo.baz' of required type 'Color!' was not provided." → Seq(Pos(9, 15)), - "Field 'doo' is not defined by type 'Config'; Did you mean foo?" → Seq(Pos(14, 13))) + "Expected type 'Int', found '\"foo\"'. Int value expected" -> Seq(Pos(4, 18)), + "Expected type 'Color!', found 'FOO_BAR'. Enum value 'FOO_BAR' is undefined in enum type 'Color'. Known values are: RED, GREEN, BLUE." -> Seq(Pos(7, 21)), + "Field 'Foo.baz' of required type 'Color!' was not provided." -> Seq(Pos(8, 15)), + "Field 'test' is not defined by type 'Foo'." -> Seq(Pos(8, 16)), + "Field 'Foo.baz' of required type 'Color!' was not provided." 
-> Seq(Pos(9, 15)), + "Field 'doo' is not defined by type 'Config'; Did you mean foo?" -> Seq(Pos(14, 13))) } "support `Any` value" in { @@ -149,8 +149,8 @@ class InputDocumentMaterializerSpec extends WordSpec with Matchers with StringMa val errors = QueryValidator.default.validateInputDocument(schema, inp, "Config") assertViolations(errors, - "Field 'Config.test' of required type 'Any!' was not provided." → Seq(Pos(2, 11)), - "Expected type 'Int', found '\"foo\"'. Int value expected" → Seq(Pos(4, 18))) + "Field 'Config.test' of required type 'Any!' was not provided." -> Seq(Pos(2, 11)), + "Expected type 'Int', found '\"foo\"'. Int value expected" -> Seq(Pos(4, 18))) } "support `to` with `FromInput` type class" in { @@ -174,7 +174,7 @@ class InputDocumentMaterializerSpec extends WordSpec with Matchers with StringMa """) val vars = scalaInput(Map( - "comm" → "from variable" + "comm" -> "from variable" )) document.to(ArticleType, vars) should be ( diff --git a/src/test/scala/sangria/execution/ListsSpec.scala b/src/test/scala/sangria/execution/ListsSpec.scala index 60ff2886..ba0615cb 100644 --- a/src/test/scala/sangria/execution/ListsSpec.scala +++ b/src/test/scala/sangria/execution/ListsSpec.scala @@ -16,9 +16,9 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { implicit val validAny = new ValidOutType[Any, Any] {} val data = Data(testData) - lazy val Type: ObjectType[Unit, Data] = ObjectType("DataType", () ⇒ fields[Unit, Data]( + lazy val Type: ObjectType[Unit, Data] = ObjectType("DataType", () => fields[Unit, Data]( Field("test", testType, resolve = _.value.test), - Field("nest", OptionType(Type), resolve = _ ⇒ data) + Field("nest", OptionType(Type), resolve = _ => data) )) val schema = Schema(Type) @@ -26,7 +26,7 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { val Success(doc) = QueryParser.parse("{ nest { test } }") val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ 
HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } Executor.execute(schema, doc.copy(sourceMapper = None), root = data, exceptionHandler = exceptionHandler).await should be (expected) @@ -39,30 +39,30 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { val tpe = OptionType(ListType(OptionType(IntType))) "List[T]" should { - "Contains values" in check(tpe, List(Some(1), Some(2)), Map("data" → Map("nest" → Map("test" → List(1, 2))))) - "Contains None" in check(tpe, List(Some(1), None, Some(2)), Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) - "Contains null" in check(tpe, List(1, null, 2), Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) - "Returns None" in check(tpe, None, Map("data" → Map("nest" → Map("test" → null)))) - "Returns null" in check(tpe, Value(null), Map("data" → Map("nest" → Map("test" → null)))) + "Contains values" in check(tpe, List(Some(1), Some(2)), Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) + "Contains None" in check(tpe, List(Some(1), None, Some(2)), Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) + "Contains null" in check(tpe, List(1, null, 2), Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) + "Returns None" in check(tpe, None, Map("data" -> Map("nest" -> Map("test" -> null)))) + "Returns null" in check(tpe, Value(null), Map("data" -> Map("nest" -> Map("test" -> null)))) } "Future[List[T]]" should { "Contains values" in check(tpe, success(List(Some(1), Some(2))), - Map("data" → Map("nest" → Map("test" → List(1, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains None" in check(tpe, success(List(Some(1), None, Some(2))), - Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) "Contains null" in check(tpe, success(List(1, null, 2)), - Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) - "Returns None" 
in check(tpe, success(None), Map("data" → Map("nest" → Map("test" → null)))) - "Returns null" in check(tpe, FutureValue(success(null)), Map("data" → Map("nest" → Map("test" → null)))) + Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) + "Returns None" in check(tpe, success(None), Map("data" -> Map("nest" -> Map("test" -> null)))) + "Returns null" in check(tpe, FutureValue(success(null)), Map("data" -> Map("nest" -> Map("test" -> null)))) "Rejected" in check(tpe, FutureValue(Future.failed(new IllegalStateException("Boom"))), Map( - "data" → Map("nest" → Map("test" → null)), - "errors" → List(Map("message" → "Boom", "path" → List("nest", "test"), "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> Map("test" -> null)), + "errors" -> List(Map("message" -> "Boom", "path" -> List("nest", "test"), "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } } @@ -70,41 +70,41 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { val tpe = ListType(OptionType(IntType)) "List[T]" should { - "Contains values" in check(tpe, List(Some(1), Some(2)), Map("data" → Map("nest" → Map("test" → List(1, 2))))) - "Contains None" in check(tpe, List(Some(1), None, Some(2)), Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) - "Contains null" in check(tpe, List(1, null, 2), Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) + "Contains values" in check(tpe, List(Some(1), Some(2)), Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) + "Contains None" in check(tpe, List(Some(1), None, Some(2)), Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) + "Contains null" in check(tpe, List(1, null, 2), Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) "Returns null" in check(tpe, Value(null), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 
10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } "Future[List[T]]" should { "Contains values" in check(tpe, success(List(Some(1), Some(2))), - Map("data" → Map("nest" → Map("test" → List(1, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains None" in check(tpe, success(List(Some(1), None, Some(2))), - Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) "Contains null" in check(tpe, success(List(1, null, 2)), - Map("data" → Map("nest" → Map("test" → List(1, null, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, null, 2))))) "Returns null" in check(tpe, FutureValue(success(null)), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) "Rejected" in check(tpe, FutureValue(Future.failed(new IllegalStateException("Boom"))), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Boom", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Boom", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } } @@ -112,34 +112,34 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { val tpe = OptionType(ListType(IntType)) "List[T]" should { - "Contains values" in check(tpe, List(1, 2), Map("data" → Map("nest" → Map("test" → List(1, 2))))) + "Contains values" in 
check(tpe, List(1, 2), Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains null" in check(tpe, List(1, null, 2), Map( - "data" → Map("nest" → Map("test" → null)), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) - "Returns null" in check(tpe, Value(null), Map("data" → Map("nest" → Map("test" → null)))) + "data" -> Map("nest" -> Map("test" -> null)), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) + "Returns null" in check(tpe, Value(null), Map("data" -> Map("nest" -> Map("test" -> null)))) } "Future[List[T]]" should { "Contains values" in check(tpe, success(List(1, 2)), - Map("data" → Map("nest" → Map("test" → List(1, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains null" in check(tpe, success(List(1, null, 2)), Map( - "data" → Map("nest" → Map("test" → null)), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) - "Returns null" in check(tpe, FutureValue(success(null)), Map("data" → Map("nest" → Map("test" → null)))) + "data" -> Map("nest" -> Map("test" -> null)), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) + "Returns null" in check(tpe, FutureValue(success(null)), Map("data" -> Map("nest" -> Map("test" -> null)))) "Rejected" in check(tpe, FutureValue(Future.failed(new IllegalStateException("Boom"))), Map( - "data" → Map("nest" → Map("test" → null)), - "errors" → List(Map("message" → "Boom", "path" → List("nest", "test"), "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> Map("test" -> null)), + 
"errors" -> List(Map("message" -> "Boom", "path" -> List("nest", "test"), "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } } @@ -147,44 +147,44 @@ class ListsSpec extends WordSpec with Matchers with FutureResultSupport { val tpe = ListType(IntType) "List[T]" should { - "Contains values" in check(tpe, List(1, 2), Map("data" → Map("nest" → Map("test" → List(1, 2))))) + "Contains values" in check(tpe, List(1, 2), Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains null" in check(tpe, List(1, null, 2), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) "Returns null" in check(tpe, Value(null), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } "Future[List[T]]" should { "Contains values" in check(tpe, success(List(1, 2)), - Map("data" → Map("nest" → Map("test" → List(1, 2))))) + Map("data" -> Map("nest" -> Map("test" -> List(1, 2))))) "Contains null" in check(tpe, success(List(1, null, 2)), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> 
List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) "Returns null" in check(tpe, FutureValue(success(null)), Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "test"), - "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "test"), + "locations" -> List(Map("line" -> 1, "column" -> 10)))))) "Rejected" in check(tpe, FutureValue(Future.failed(new IllegalStateException("Boom"))), Map( - "data" → Map("nest" → null), - "errors" → List(Map("message" → "Boom", "path" → List("nest", "test"), "locations" → List(Map("line" → 1, "column" → 10)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map("message" -> "Boom", "path" -> List("nest", "test"), "locations" -> List(Map("line" -> 1, "column" -> 10)))))) } } } diff --git a/src/test/scala/sangria/execution/MiddlewareSpec.scala b/src/test/scala/sangria/execution/MiddlewareSpec.scala index 87fd399a..fc295bc4 100644 --- a/src/test/scala/sangria/execution/MiddlewareSpec.scala +++ b/src/test/scala/sangria/execution/MiddlewareSpec.scala @@ -49,7 +49,7 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { val key = cacheKey(ctx) if (ctx.field.tags.contains(Cached)) - cache.contains(key) → cache.get(cacheKey(ctx)) + cache.contains(key) -> cache.get(cacheKey(ctx)) else noCache } @@ -72,8 +72,8 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { def afterField(cache: QueryVal, fromCache: FieldVal, value: Any, mctx: MiddlewareQueryContext[Any, _, _], ctx: Context[Any, _]) = value match { - case s: String ⇒ Some(s + suffix) - case _ ⇒ None + case s: String => Some(s + suffix) + case _ => None } } @@ -124,33 +124,33 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { } } - val 
TestObject: ObjectType[Count, Unit] = ObjectType("Test", () ⇒ fields[Count, Unit]( - Field("error", OptionType(StringType), resolve = _ ⇒ throw new IllegalStateException("boom")), - Field("futureError", OptionType(StringType), resolve = _ ⇒ Future.failed[Option[String]](new IllegalStateException("boom"))), - Field("defError", OptionType(StringType), resolve = _ ⇒ Fail), - Field("someString", StringType, resolve = _ ⇒ "nothing special"), - Field("someStringMapped", StringType, resolve = c ⇒ UpdateCtx("unmapped")(_ ⇒ c.ctx) map (_.substring(2))), - Field("errorInAfter", OptionType(StringType), resolve = _ ⇒ "everything ok here"), - Field("errorInBefore", OptionType(StringType), resolve = _ ⇒ "everything ok here"), - Field("anotherString", StringType, resolve = _ ⇒ "foo"), + val TestObject: ObjectType[Count, Unit] = ObjectType("Test", () => fields[Count, Unit]( + Field("error", OptionType(StringType), resolve = _ => throw new IllegalStateException("boom")), + Field("futureError", OptionType(StringType), resolve = _ => Future.failed[Option[String]](new IllegalStateException("boom"))), + Field("defError", OptionType(StringType), resolve = _ => Fail), + Field("someString", StringType, resolve = _ => "nothing special"), + Field("someStringMapped", StringType, resolve = c => UpdateCtx("unmapped")(_ => c.ctx) map (_.substring(2))), + Field("errorInAfter", OptionType(StringType), resolve = _ => "everything ok here"), + Field("errorInBefore", OptionType(StringType), resolve = _ => "everything ok here"), + Field("anotherString", StringType, resolve = _ => "foo"), Field("cachedId", IntType, tags = Cached :: Nil, resolve = _.ctx.count.incrementAndGet()), - Field("delay30", StringType, resolve = _ ⇒ Future { + Field("delay30", StringType, resolve = _ => Future { Thread.sleep(30) "slept for 30ms" }), - Field("nested", TestObject, resolve = _ ⇒ ()) + Field("nested", TestObject, resolve = _ => ()) )) case object Fail extends Deferred[String] class BrokenResolver extends 
DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case Fail ⇒ Future.failed(new IllegalStateException("error in resolver")) + case Fail => Future.failed(new IllegalStateException("error in resolver")) } } val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val schema = Schema(TestObject, Some(TestObject)) @@ -192,17 +192,17 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { val res = Executor.execute(schema, query, userContext = ctx, middleware = new CachingMiddleware :: Nil).await res.asInstanceOf[Map[String, Any]]("data") should be (Map( - "cachedId" → 1, - "foo" → 1, - "someString" → "nothing special", - "nested" → Map( - "cachedId" → 1, - "nested" → Map( - "cachedId" → 1)), - "foo" → Map( - "cachedId" → 1, - "nested" → Map( - "cachedId" → 1)))) + "cachedId" -> 1, + "foo" -> 1, + "someString" -> "nothing special", + "nested" -> Map( + "cachedId" -> 1, + "nested" -> Map( + "cachedId" -> 1)), + "foo" -> Map( + "cachedId" -> 1, + "nested" -> Map( + "cachedId" -> 1)))) ctx.count.get() should be (1) } @@ -215,27 +215,27 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { class Resolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case ED ⇒ Future.failed(error("deferred error")) - case SD ⇒ Future.successful(Some("deferred success")) + case ED => Future.failed(error("deferred error")) + case SD => Future.successful(Some("deferred success")) } } - val TestObject = ObjectType("Test", () ⇒ fields[Unit, Unit]( - Field("e1", OptionType(StringType), resolve = _ ⇒ Value(throw error("e1 error"))), - Field("e2", OptionType(StringType), resolve = _ ⇒ TryValue(Failure(error("e2 
error")))), - Field("e3", OptionType(StringType), resolve = _ ⇒ FutureValue(Future.failed(error("e3 error")))), - Field("e4", OptionType(StringType), resolve = _ ⇒ DeferredValue(ED)), - Field("e5", OptionType(StringType), resolve = _ ⇒ DeferredFutureValue(Future.successful(ED))), - Field("e6", OptionType(StringType), resolve = _ ⇒ DeferredFutureValue(Future.failed(error("e6 error")))), - Field("e7", OptionType(StringType), resolve = _ ⇒ PartialValue(Some("e7 success"), Vector(error("e71 error"), error("e72 error")))), - Field("e8", OptionType(StringType), resolve = _ ⇒ PartialFutureValue(Future.successful(PartialValue[Unit, Option[String]](Some("e8 success"), Vector(error("e81 error"), error("e82 error")))))), - Field("e9", OptionType(StringType), resolve = _ ⇒ PartialFutureValue(Future.failed(error("e9")))), - - Field("s1", OptionType(StringType), resolve = _ ⇒ Value(Some("s1 success"))), - Field("s2", OptionType(StringType), resolve = _ ⇒ TryValue(Success(Some("s2 success")))), - Field("s3", OptionType(StringType), resolve = _ ⇒ FutureValue(Future.successful(Some("s3 success")))), - Field("s4", OptionType(StringType), resolve = _ ⇒ DeferredValue(SD)), - Field("s5", OptionType(StringType), resolve = _ ⇒ DeferredFutureValue(Future.successful(SD))) + val TestObject = ObjectType("Test", () => fields[Unit, Unit]( + Field("e1", OptionType(StringType), resolve = _ => Value(throw error("e1 error"))), + Field("e2", OptionType(StringType), resolve = _ => TryValue(Failure(error("e2 error")))), + Field("e3", OptionType(StringType), resolve = _ => FutureValue(Future.failed(error("e3 error")))), + Field("e4", OptionType(StringType), resolve = _ => DeferredValue(ED)), + Field("e5", OptionType(StringType), resolve = _ => DeferredFutureValue(Future.successful(ED))), + Field("e6", OptionType(StringType), resolve = _ => DeferredFutureValue(Future.failed(error("e6 error")))), + Field("e7", OptionType(StringType), resolve = _ => PartialValue(Some("e7 success"), Vector(error("e71 
error"), error("e72 error")))), + Field("e8", OptionType(StringType), resolve = _ => PartialFutureValue(Future.successful(PartialValue[Unit, Option[String]](Some("e8 success"), Vector(error("e81 error"), error("e82 error")))))), + Field("e9", OptionType(StringType), resolve = _ => PartialFutureValue(Future.failed(error("e9")))), + + Field("s1", OptionType(StringType), resolve = _ => Value(Some("s1 success"))), + Field("s2", OptionType(StringType), resolve = _ => TryValue(Success(Some("s2 success")))), + Field("s3", OptionType(StringType), resolve = _ => FutureValue(Future.successful(Some("s3 success")))), + Field("s4", OptionType(StringType), resolve = _ => DeferredValue(SD)), + Field("s5", OptionType(StringType), resolve = _ => DeferredFutureValue(Future.successful(SD))) )) val schema = Schema(TestObject) @@ -280,20 +280,20 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { exceptionHandler = exceptionHandler).await res.result.asInstanceOf[Map[String, Any]]("data") should be (Map( - "e1" → null, - "e2" → null, - "e3" → null, - "e4" → null, - "e5" → null, - "e6" → null, - "e7" → "e7 success", - "e8" → "e8 success", - "e9" → null, - "s1" → "s1 success", - "s2" → "s2 success", - "s3" → "s3 success", - "s4" → "deferred success", - "s5" → "deferred success")) + "e1" -> null, + "e2" -> null, + "e3" -> null, + "e4" -> null, + "e5" -> null, + "e6" -> null, + "e7" -> "e7 success", + "e8" -> "e8 success", + "e9" -> null, + "s1" -> "s1 success", + "s2" -> "s2 success", + "s3" -> "s3 success", + "s4" -> "deferred success", + "s5" -> "deferred success")) val capture = res.middlewareVals.head._1.asInstanceOf[Capture] @@ -301,24 +301,24 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { "e1", "e2", "e3", "e4", "e5", "e6", "e7", "e8", "e9", "s1", "s2", "s3", "s4", "s5")) capture.after should be (Cache( - "e7" → Set("e7 success"), - "e8" → Set("e8 success"), - "s1" → Set("s1 success"), - "s2" → Set("s2 success"), - "s3" 
→ Set("s3 success"), - "s4" → Set("deferred success"), - "s5" → Set("deferred success"))) + "e7" -> Set("e7 success"), + "e8" -> Set("e8 success"), + "s1" -> Set("s1 success"), + "s2" -> Set("s2 success"), + "s3" -> Set("s3 success"), + "s4" -> Set("deferred success"), + "s5" -> Set("deferred success"))) capture.error should be (Cache( - "e1" → Set("e1 error"), - "e2" → Set("e2 error"), - "e3" → Set("e3 error"), - "e4" → Set("deferred error"), - "e5" → Set("deferred error"), - "e6" → Set("e6 error"), - "e7" → Set("e71 error", "e72 error"), - "e8" → Set("e81 error", "e82 error"), - "e9" → Set("e9"))) + "e1" -> Set("e1 error"), + "e2" -> Set("e2 error"), + "e3" -> Set("e3 error"), + "e4" -> Set("deferred error"), + "e5" -> Set("deferred error"), + "e6" -> Set("e6 error"), + "e7" -> Set("e71 error", "e72 error"), + "e8" -> Set("e81 error", "e82 error"), + "e9" -> Set("e9"))) } "value, updated in middleware `afterField`, should be propagated though all middleware in the chain (effectively should be a fold)" in { @@ -336,9 +336,9 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { middleware = Suffixer(" s1") :: Suffixer(" s2") :: Nil).await res should be (Map( - "data" → Map( - "someString" → "nothing special s2 s1", - "someStringMapped" → "mapped s2 s1"))) + "data" -> Map( + "someString" -> "nothing special s2 s1", + "someStringMapped" -> "mapped s2 s1"))) } "add extensions" in { @@ -372,15 +372,15 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { middleware = new QueryMiddleware("a-") :: new QueryMiddleware("b-") :: Nil).await res should be (Map( - "data" → Map( - "someString" → "nothing special"), - "extensions" → Map( - "a-metrics" → Map( - "test-name" → "test name", - "test-executionTimeMs" → 123), - "b-metrics" → Map( - "test-name" → "test name", - "test-executionTimeMs" → 123)))) + "data" -> Map( + "someString" -> "nothing special"), + "extensions" -> Map( + "a-metrics" -> Map( + "test-name" -> "test 
name", + "test-executionTimeMs" -> 123), + "b-metrics" -> Map( + "test-name" -> "test name", + "test-executionTimeMs" -> 123)))) } "allow attachments to communicate values with resolve cuntions" in { @@ -397,7 +397,7 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { BeforeFieldResult(attachment = name.map(CurrentUser)) } - val schema = Schema(ObjectType("Test", () ⇒ fields[Unit, Unit]( + val schema = Schema(ObjectType("Test", () => fields[Unit, Unit]( Field("user", OptionType(StringType), resolve = _.attachment[CurrentUser].map(_.userName)), Field("users", ListType(StringType), resolve = _.attachments[CurrentUser].map(_.userName))))) @@ -405,9 +405,9 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { middleware = new QueryMiddleware(Some("foo")) :: new QueryMiddleware(None) :: new QueryMiddleware(Some("bar")) :: Nil).await res should be (Map( - "data" → Map( - "user" → "foo", - "users" → Vector("foo", "bar")))) + "data" -> Map( + "user" -> "foo", + "users" -> Vector("foo", "bar")))) } behave like properFieldLevelMiddleware( @@ -466,32 +466,32 @@ class MiddlewareSpec extends WordSpec with Matchers with FutureResultSupport { exceptionHandler = exceptionHandler).await res.asInstanceOf[Map[String, Any]]("data") should be (Map( - "anotherString" → "foo", - "a" → "something very special!", - "someString" → "something very special!", - "error" → null, - "errorInAfter" → null, - "errorInBefore" → null, - "nested" → Map( - "someString" → "something very special!", - "delay30" → "slept for 30ms", - "error" → null, - "futureError" → null, - "defError" → null, - "nested" → Map( - "error" → null, - "defError" → null)))) + "anotherString" -> "foo", + "a" -> "something very special!", + "someString" -> "something very special!", + "error" -> null, + "errorInAfter" -> null, + "errorInBefore" -> null, + "nested" -> Map( + "someString" -> "something very special!", + "delay30" -> "slept for 30ms", + "error" -> null, + 
"futureError" -> null, + "defError" -> null, + "nested" -> Map( + "error" -> null, + "defError" -> null)))) ctx.metrics.mapValues(_.size) should be (Map( - "errors" → 7, - "Test.delay30" → 1, - "Test.nested" → 2, - "Test.someString" → 3, - "Test.anotherString" → 1, - "Test.error" → 3, - "Test.futureError" → 1, - "Test.defError" → 2, - "Test.errorInAfter" → 1 + "errors" -> 7, + "Test.delay30" -> 1, + "Test.nested" -> 2, + "Test.someString" -> 3, + "Test.anotherString" -> 1, + "Test.error" -> 3, + "Test.futureError" -> 1, + "Test.defError" -> 2, + "Test.errorInAfter" -> 1 )) } } diff --git a/src/test/scala/sangria/execution/MutationSpec.scala b/src/test/scala/sangria/execution/MutationSpec.scala index 68fdf812..1ccd2b5b 100644 --- a/src/test/scala/sangria/execution/MutationSpec.scala +++ b/src/test/scala/sangria/execution/MutationSpec.scala @@ -18,8 +18,8 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { class Resolver extends DeferredResolver[Any] { def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case SuccessfulDefer(n) ⇒ Future.successful(n) - case FailedDefer(_) ⇒ Future.failed(new IllegalStateException("error in resolver")) + case SuccessfulDefer(n) => Future.successful(n) + case FailedDefer(_) => Future.failed(new IllegalStateException("error in resolver")) } } @@ -73,28 +73,28 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { Some(ObjectType("Mutation", fields[UserContext, Root]( Field("immediatelyChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg)))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg)))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("deferChangeTheNumber", OptionType(NumberHolderType), arguments = 
NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(SuccessfulDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(SuccessfulDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("deferFailChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(FailedDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(FailedDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("deferFutChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(DeferredFutureValue(Future.successful(SuccessfulDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(DeferredFutureValue(Future.successful(SuccessfulDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("deferFutFailChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(DeferredFutureValue(Future.successful(FailedDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(DeferredFutureValue(Future.successful(FailedDefer(ctx.value.immediatelyChangeTheNumber(ctx.arg(NewNumberArg))))))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("promiseToChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(ctx.value.promiseToChangeTheNumber(ctx.arg(NewNumberArg)))(v ⇒ ctx.ctx.copy(num = 
ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(ctx.value.promiseToChangeTheNumber(ctx.arg(NewNumberArg)))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("failToChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(ctx.value.failToChangeTheNumber(ctx.arg(NewNumberArg)))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), + resolve = ctx => UpdateCtx(ctx.value.failToChangeTheNumber(ctx.arg(NewNumberArg)))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))), Field("promiseAndFailToChangeTheNumber", OptionType(NumberHolderType), arguments = NewNumberArg :: Nil, - resolve = ctx ⇒ UpdateCtx(ctx.value.promiseAndFailToChangeTheNumber(ctx.arg(NewNumberArg)))(v ⇒ ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))) + resolve = ctx => UpdateCtx(ctx.value.promiseAndFailToChangeTheNumber(ctx.arg(NewNumberArg)))(v => ctx.ctx.copy(num = ctx.ctx.num + v.theNumber.get()))) ))) ) @@ -134,28 +134,28 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { } """, Map( - "data" → Map( - "first" → Map( - "theNumber" → 1, - "userCtx" → 11), - "second" → Map( - "theNumber" → 2, - "userCtx" → 13), - "third" → Map( - "theNumber" → 3, - "userCtx" → 16), - "fourth" → Map( - "theNumber" → 4, - "userCtx" → 20), - "fifth" → Map( - "theNumber" → 5, - "userCtx" → 25), - "def" → Map( - "theNumber" → 6, - "userCtx" → 31), - "defFut" → Map( - "theNumber" → 7, - "userCtx" → 38) + "data" -> Map( + "first" -> Map( + "theNumber" -> 1, + "userCtx" -> 11), + "second" -> Map( + "theNumber" -> 2, + "userCtx" -> 13), + "third" -> Map( + "theNumber" -> 3, + "userCtx" -> 16), + "fourth" -> Map( + "theNumber" -> 4, + "userCtx" -> 20), + "fifth" -> Map( + "theNumber" -> 5, + "userCtx" -> 25), + "def" -> Map( + "theNumber" -> 6, + "userCtx" -> 31), + "defFut" -> Map( + "theNumber" -> 7, + "userCtx" -> 38) ) ), userContext = UserContext(10), @@ -202,46 +202,46 @@ class MutationSpec extends 
WordSpec with Matchers with GraphQlSupport { } """, Map( - "first" → Map( - "theNumber" → 1, - "userCtx" → 11), - "second" → Map( - "theNumber" → 2, - "userCtx" → 13), - "third" → null, - "fourth" → Map( - "theNumber" → 4, - "userCtx" → 17), - "fifth" → Map( - "theNumber" → 5, - "userCtx" → 22), - "sixth" → null, - "def" → Map( - "theNumber" → 7, - "userCtx" → 29), - "defFail" → null, - "defFut" → Map( - "theNumber" → 9, - "userCtx" → 38), - "defFutFail" → null, - "def1" → Map( - "theNumber" → 11, - "userCtx" → 49) + "first" -> Map( + "theNumber" -> 1, + "userCtx" -> 11), + "second" -> Map( + "theNumber" -> 2, + "userCtx" -> 13), + "third" -> null, + "fourth" -> Map( + "theNumber" -> 4, + "userCtx" -> 17), + "fifth" -> Map( + "theNumber" -> 5, + "userCtx" -> 22), + "sixth" -> null, + "def" -> Map( + "theNumber" -> 7, + "userCtx" -> 29), + "defFail" -> null, + "defFut" -> Map( + "theNumber" -> 9, + "userCtx" -> 38), + "defFutFail" -> null, + "def1" -> Map( + "theNumber" -> 11, + "userCtx" -> 49) ), List( Map( - "message" → "Cannot change the number", - "path" → List("third"), - "locations" → List(Map("line" → 9, "column" → 11))), - Map("message" → "Cannot change the number", - "path" → List("sixth"), - "locations" → List(Map("line" → 18, "column" → 11))), - Map("message" → "error in resolver", - "path" → List("defFail"), - "locations" → List(Map("line" → 24, "column" → 11))), - Map("message" → "error in resolver", - "path" → List("defFutFail"), - "locations" → List(Map("line" → 30, "column" → 11)))), + "message" -> "Cannot change the number", + "path" -> List("third"), + "locations" -> List(Map("line" -> 9, "column" -> 11))), + Map("message" -> "Cannot change the number", + "path" -> List("sixth"), + "locations" -> List(Map("line" -> 18, "column" -> 11))), + Map("message" -> "error in resolver", + "path" -> List("defFail"), + "locations" -> List(Map("line" -> 24, "column" -> 11))), + Map("message" -> "error in resolver", + "path" -> List("defFutFail"), + "locations" 
-> List(Map("line" -> 30, "column" -> 11)))), userContext = UserContext(10), resolver = new Resolver ) @@ -257,13 +257,13 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { val mutation = ObjectType("Mutation", fields[String, Unit]( Field("updateSimple", child, arguments = AddArg :: Nil, - resolve = c ⇒ UpdateCtx(c.ctx + " " + c.arg(AddArg))(v ⇒ v + " ctx ").map(v ⇒ v + " map")), + resolve = c => UpdateCtx(c.ctx + " " + c.arg(AddArg))(v => v + " ctx ").map(v => v + " map")), Field("updateFuture", child, arguments = AddArg :: Nil, - resolve = c ⇒ UpdateCtx(Future.successful(c.ctx + " " + c.arg(AddArg)))(v ⇒ v + " ctx ").map(v ⇒ v + " map")), + resolve = c => UpdateCtx(Future.successful(c.ctx + " " + c.arg(AddArg)))(v => v + " ctx ").map(v => v + " map")), Field("updateTry", child, arguments = AddArg :: Nil, - resolve = c ⇒ UpdateCtx(Success(c.ctx + " " + c.arg(AddArg)))(v ⇒ v + " ctx ").map(v ⇒ v + " map")) + resolve = c => UpdateCtx(Success(c.ctx + " " + c.arg(AddArg)))(v => v + " ctx ").map(v => v + " map")) )) val schema = Schema(mutation, Some(mutation)) @@ -278,11 +278,11 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { c: updateTry(add: "c") {ctx, val} } """, - Map("data" → + Map("data" -> Map( - "a" → Map("ctx" → "root a ctx ", "val" → "root a map"), - "b" → Map("ctx" → "root a ctx b ctx ", "val" → "root a ctx b map"), - "c" → Map("ctx" → "root a ctx b ctx c ctx ", "val" → "root a ctx b ctx c map"))), + "a" -> Map("ctx" -> "root a ctx ", "val" -> "root a map"), + "b" -> Map("ctx" -> "root a ctx b ctx ", "val" -> "root a ctx b map"), + "c" -> Map("ctx" -> "root a ctx b ctx c ctx ", "val" -> "root a ctx b ctx c map"))), userContext = "root") SimpleGraphQlSupport.check( @@ -295,11 +295,11 @@ class MutationSpec extends WordSpec with Matchers with GraphQlSupport { c: updateTry(add: "c") {ctx, val} } """, - Map("data" → + Map("data" -> Map( - "a" → Map("ctx" → "root a ctx ", "val" → "root a map"), - "b" → Map("ctx" 
→ "root b ctx ", "val" → "root b map"), - "c" → Map("ctx" → "root c ctx ", "val" → "root c map"))), + "a" -> Map("ctx" -> "root a ctx ", "val" -> "root a map"), + "b" -> Map("ctx" -> "root b ctx ", "val" -> "root b map"), + "c" -> Map("ctx" -> "root c ctx ", "val" -> "root c map"))), userContext = "root") } } diff --git a/src/test/scala/sangria/execution/NotNullSpec.scala b/src/test/scala/sangria/execution/NotNullSpec.scala index 016bcc90..f06556a3 100644 --- a/src/test/scala/sangria/execution/NotNullSpec.scala +++ b/src/test/scala/sangria/execution/NotNullSpec.scala @@ -41,7 +41,7 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G def nonNullPromiseNest = Future.successful(new NullingSubject) } - val DataType: ObjectType[Unit, TestSubject] = ObjectType("DataType", () ⇒ fields[Unit, TestSubject]( + val DataType: ObjectType[Unit, TestSubject] = ObjectType("DataType", () => fields[Unit, TestSubject]( Field("sync", OptionType(StringType), resolve = _.value.sync), Field("nonNullSync", StringType, resolve = _.value.nonNullSync), Field("promise", OptionType(StringType), resolve = _.value.promise), @@ -50,10 +50,10 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G Field("nonNullNest", DataType, resolve = _.value.nonNullNest), Field("promiseNest", OptionType(DataType), resolve = _.value.promiseNest), Field("nonNullPromiseNest", DataType, resolve = _.value.nonNullPromiseNest), - Field("NaN", OptionType(FloatType), resolve = _ ⇒ Some(Double.NaN)), - Field("Inf", OptionType(FloatType), resolve = _ ⇒ Some(Double.PositiveInfinity)), - Field("nonNullNaN", FloatType, resolve = _ ⇒ Double.NaN), - Field("nonNullInf", FloatType, resolve = _ ⇒ Double.PositiveInfinity))) + Field("NaN", OptionType(FloatType), resolve = _ => Some(Double.NaN)), + Field("Inf", OptionType(FloatType), resolve = _ => Some(Double.PositiveInfinity)), + Field("nonNullNaN", FloatType, resolve = _ => Double.NaN), + Field("nonNullInf", FloatType, 
resolve = _ => Double.PositiveInfinity))) val schema = Schema(DataType) @@ -66,8 +66,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("sync" → null), - "errors" → List(Map("message" → "sync", "path" → List("sync"), "locations" → List(Map("line" → 3, "column" → 11)))))) + "data" -> Map("sync" -> null), + "errors" -> List(Map("message" -> "sync", "path" -> List("sync"), "locations" -> List(Map("line" -> 3, "column" -> 11)))))) "nulls a nullable field that throws in a promise" in check( new ThrowingSubject, @@ -77,8 +77,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("promise" → null), - "errors" → List(Map("message" → "promise", "path" → List("promise"), "locations" → List(Map("line" → 3, "column" → 11)))))) + "data" -> Map("promise" -> null), + "errors" -> List(Map("message" -> "promise", "path" -> List("promise"), "locations" -> List(Map("line" -> 3, "column" -> 11)))))) "nulls a synchronously returned object that contains a non-nullable field that throws synchronously" in check( new ThrowingSubject, @@ -90,8 +90,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("nest" → null), - "errors" → List(Map("message" → "nonNullSync", "path" → List("nest", "nonNullSync"), "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map("message" -> "nonNullSync", "path" -> List("nest", "nonNullSync"), "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls a synchronously returned object that contains a non-nullable field that throws in a promise" in check( new ThrowingSubject, @@ -103,8 +103,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("nest" → null), - "errors" → List(Map("message" → "nonNullPromise", "path" → List("nest", "nonNullPromise"), "locations" → 
List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map("message" -> "nonNullPromise", "path" -> List("nest", "nonNullPromise"), "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls an object returned in a promise that contains a non-nullable field that throws synchronously" in check( new ThrowingSubject, @@ -116,8 +116,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("promiseNest" → null), - "errors" → List(Map("message" → "nonNullSync", "path" → List("promiseNest", "nonNullSync"), "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("promiseNest" -> null), + "errors" -> List(Map("message" -> "nonNullSync", "path" -> List("promiseNest", "nonNullSync"), "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls an object returned in a promise that contains a non-nullable field that throws in a promise" in check( new ThrowingSubject, @@ -129,8 +129,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("promiseNest" → null), - "errors" → List(Map("message" → "nonNullPromise", "path" → List("promiseNest", "nonNullPromise"), "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("promiseNest" -> null), + "errors" -> List(Map("message" -> "nonNullPromise", "path" -> List("promiseNest", "nonNullPromise"), "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls a complex tree of nullable fields that throw" in checkErrors( new ThrowingSubject, @@ -163,44 +163,44 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "nest" → Map( - "sync" → null, - "promise" → null, - "nest" → Map( - "sync" → null, - "promise" → null + "nest" -> Map( + "sync" -> null, + "promise" -> null, + "nest" -> Map( + "sync" -> null, + "promise" -> null ), - "promiseNest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( 
+ "sync" -> null, + "promise" -> null ) ), - "promiseNest" → Map( - "sync" → null, - "promise" → null, - "nest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( + "sync" -> null, + "promise" -> null, + "nest" -> Map( + "sync" -> null, + "promise" -> null ), - "promiseNest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( + "sync" -> null, + "promise" -> null ) ) ), List( - Map("message" → "sync", "path" → List("nest", "sync"), "locations" → List(Map("line" → 4, "column" → 13))), - Map("message" → "sync", "path" → List("nest", "nest", "sync"), "locations" → List(Map("line" → 7, "column" → 15))), - Map("message" → "sync", "path" → List("nest", "promiseNest", "sync"), "locations" → List(Map("line" → 11, "column" → 15))), - Map("message" → "sync", "path" → List("promiseNest", "sync"), "locations" → List(Map("line" → 16, "column" → 13))), - Map("message" → "sync", "path" → List("promiseNest", "nest", "sync"), "locations" → List(Map("line" → 19, "column" → 15))), - Map("message" → "sync", "path" → List("promiseNest", "promiseNest", "sync"), "locations" → List(Map("line" → 23, "column" → 15))), - Map("message" → "promise", "path" → List("nest", "promise"), "locations" → List(Map("line" → 5, "column" → 13))), - Map("message" → "promise", "path" → List("nest", "nest", "promise"), "locations" → List(Map("line" → 8, "column" → 15))), - Map("message" → "promise", "path" → List("nest", "promiseNest", "promise"), "locations" → List(Map("line" → 12, "column" → 15))), - Map("message" → "promise", "path" → List("promiseNest", "promise"), "locations" → List(Map("line" → 17, "column" → 13))), - Map("message" → "promise", "path" → List("promiseNest", "nest", "promise"), "locations" → List(Map("line" → 20, "column" → 15))), - Map("message" → "promise", "path" → List("promiseNest", "promiseNest", "promise"), "locations" → List(Map("line" → 24, "column" → 15))) + Map("message" -> "sync", "path" -> List("nest", "sync"), "locations" -> List(Map("line" -> 
4, "column" -> 13))), + Map("message" -> "sync", "path" -> List("nest", "nest", "sync"), "locations" -> List(Map("line" -> 7, "column" -> 15))), + Map("message" -> "sync", "path" -> List("nest", "promiseNest", "sync"), "locations" -> List(Map("line" -> 11, "column" -> 15))), + Map("message" -> "sync", "path" -> List("promiseNest", "sync"), "locations" -> List(Map("line" -> 16, "column" -> 13))), + Map("message" -> "sync", "path" -> List("promiseNest", "nest", "sync"), "locations" -> List(Map("line" -> 19, "column" -> 15))), + Map("message" -> "sync", "path" -> List("promiseNest", "promiseNest", "sync"), "locations" -> List(Map("line" -> 23, "column" -> 15))), + Map("message" -> "promise", "path" -> List("nest", "promise"), "locations" -> List(Map("line" -> 5, "column" -> 13))), + Map("message" -> "promise", "path" -> List("nest", "nest", "promise"), "locations" -> List(Map("line" -> 8, "column" -> 15))), + Map("message" -> "promise", "path" -> List("nest", "promiseNest", "promise"), "locations" -> List(Map("line" -> 12, "column" -> 15))), + Map("message" -> "promise", "path" -> List("promiseNest", "promise"), "locations" -> List(Map("line" -> 17, "column" -> 13))), + Map("message" -> "promise", "path" -> List("promiseNest", "nest", "promise"), "locations" -> List(Map("line" -> 20, "column" -> 15))), + Map("message" -> "promise", "path" -> List("promiseNest", "promiseNest", "promise"), "locations" -> List(Map("line" -> 24, "column" -> 15))) )) "nulls the first nullable object after a field throws in a long chain of fields that are non-null" in checkErrors( @@ -254,28 +254,28 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "nest" → null, - "promiseNest" → null, - "anotherNest" → null, - "anotherPromiseNest" → null + "nest" -> null, + "promiseNest" -> null, + "anotherNest" -> null, + "anotherPromiseNest" -> null ), List( Map( - "message" → "nonNullSync", - "path" → List("nest", "nonNullNest", "nonNullPromiseNest", 
"nonNullNest", "nonNullPromiseNest", "nonNullSync"), - "locations" → List(Map("line" → 8, "column" → 21))), + "message" -> "nonNullSync", + "path" -> List("nest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), + "locations" -> List(Map("line" -> 8, "column" -> 21))), Map( - "message" → "nonNullSync", - "path" → List("promiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), - "locations" → List(Map("line" → 19, "column" → 21))), + "message" -> "nonNullSync", + "path" -> List("promiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), + "locations" -> List(Map("line" -> 19, "column" -> 21))), Map( - "message" → "nonNullPromise", - "path" → List("anotherNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), - "locations" → List(Map("line" → 30, "column" → 21))), + "message" -> "nonNullPromise", + "path" -> List("anotherNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), + "locations" -> List(Map("line" -> 30, "column" -> 21))), Map( - "message" → "nonNullPromise", - "path" → List("anotherPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), - "locations" → List(Map("line" → 41, "column" → 21))) + "message" -> "nonNullPromise", + "path" -> List("anotherPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), + "locations" -> List(Map("line" -> 41, "column" -> 21))) )) "nulls a nullable field that synchronously returns null" in check( @@ -285,7 +285,7 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G sync } """, - Map("data" → Map("sync" → null))) + Map("data" -> Map("sync" -> null))) "nulls a nullable field that returns null in a promise" in check( new NullingSubject, @@ -294,7 +294,7 @@ class NotNullSpec 
extends WordSpec with Matchers with FutureResultSupport with G promise } """, - Map("data" → Map("promise" → null))) + Map("data" -> Map("promise" -> null))) "nulls a synchronously returned object that contains a non-nullable field that returns null synchronously" in check( new NullingSubject, @@ -306,11 +306,11 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "nonNullSync"), - "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "nonNullSync"), + "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls a synchronously returned object that contains a non-nullable field that returns null in a promise" in check( new NullingSubject, @@ -322,11 +322,11 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("nest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "nonNullPromise"), - "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("nest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "nonNullPromise"), + "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls an object returned in a promise that contains a non-nullable field that returns null synchronously" in check( new NullingSubject, @@ -338,11 +338,11 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("promiseNest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("promiseNest", "nonNullSync"), - "locations" → List(Map("line" → 4, "column" → 13)))))) + 
"data" -> Map("promiseNest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("promiseNest", "nonNullSync"), + "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls an object returned in a promise that contains a non-nullable field that returns null ina a promise" in check( new NullingSubject, @@ -354,11 +354,11 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("promiseNest" → null), - "errors" → List(Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("promiseNest", "nonNullPromise"), - "locations" → List(Map("line" → 4, "column" → 13)))))) + "data" -> Map("promiseNest" -> null), + "errors" -> List(Map( + "message" -> "Cannot return null for non-nullable type", + "path" -> List("promiseNest", "nonNullPromise"), + "locations" -> List(Map("line" -> 4, "column" -> 13)))))) "nulls a complex tree of nullable fields that return null" in checkErrors( new NullingSubject, @@ -391,28 +391,28 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "nest" → Map( - "sync" → null, - "promise" → null, - "nest" → Map( - "sync" → null, - "promise" → null + "nest" -> Map( + "sync" -> null, + "promise" -> null, + "nest" -> Map( + "sync" -> null, + "promise" -> null ), - "promiseNest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( + "sync" -> null, + "promise" -> null ) ), - "promiseNest" → Map( - "sync" → null, - "promise" → null, - "nest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( + "sync" -> null, + "promise" -> null, + "nest" -> Map( + "sync" -> null, + "promise" -> null ), - "promiseNest" → Map( - "sync" → null, - "promise" → null + "promiseNest" -> Map( + "sync" -> null, + "promise" -> null ) ) ), @@ -469,88 +469,88 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "nest" → null, - 
"promiseNest" → null, - "anotherNest" → null, - "anotherPromiseNest" → null + "nest" -> null, + "promiseNest" -> null, + "anotherNest" -> null, + "anotherPromiseNest" -> null ), List( Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("nest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), - "locations" → List(Map("line" → 8, "column" → 21))), + "message" -> "Cannot return null for non-nullable type", + "path" -> List("nest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), + "locations" -> List(Map("line" -> 8, "column" -> 21))), Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("promiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), - "locations" → List(Map("line" → 19, "column" → 21))), + "message" -> "Cannot return null for non-nullable type", + "path" -> List("promiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullSync"), + "locations" -> List(Map("line" -> 19, "column" -> 21))), Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("anotherNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), - "locations" → List(Map("line" → 30, "column" → 21))), + "message" -> "Cannot return null for non-nullable type", + "path" -> List("anotherNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), + "locations" -> List(Map("line" -> 30, "column" -> 21))), Map( - "message" → "Cannot return null for non-nullable type", - "path" → List("anotherPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullNest", "nonNullPromiseNest", "nonNullPromise"), - "locations" → List(Map("line" → 41, "column" → 21))) + "message" -> "Cannot return null for non-nullable type", + "path" -> List("anotherPromiseNest", "nonNullNest", "nonNullPromiseNest", 
"nonNullNest", "nonNullPromiseNest", "nonNullPromise"), + "locations" -> List(Map("line" -> 41, "column" -> 21))) )) "nulls the top level if sync non-nullable field throws" in check( new ThrowingSubject, "query Q { nonNullSync }", Map( - "data" → null, - "errors" → List(Map("message" → "nonNullSync", "path" → List("nonNullSync"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "nonNullSync", "path" -> List("nonNullSync"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "nulls the top level if async non-nullable field errors" in check( new ThrowingSubject, "query Q { nonNullPromise }", Map( - "data" → null, - "errors" → List(Map("message" → "nonNullPromise", "path" → List("nonNullPromise"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "nonNullPromise", "path" -> List("nonNullPromise"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "nulls the top level if sync non-nullable field returns null" in check( new NullingSubject, "query Q { nonNullSync }", Map( - "data" → null, - "errors" → List(Map("message" → "Cannot return null for non-nullable type", "path" → List("nonNullSync"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "Cannot return null for non-nullable type", "path" -> List("nonNullSync"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "nulls the top level if async non-nullable field resolves null" in check( new NullingSubject, "query Q { nonNullPromise }", Map( - "data" → null, - "errors" → List(Map("message" → "Cannot return null for non-nullable type", "path" → List("nonNullPromise"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "Cannot return null for non-nullable type", "path" -> List("nonNullPromise"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "nulls the top 
level if non-nullable field resolves NaN" in check( new NullingSubject, "query Q { nonNullNaN }", Map( - "data" → null, - "errors" → List(Map("message" → "Cannot return null for non-nullable type", "path" → List("nonNullNaN"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "Cannot return null for non-nullable type", "path" -> List("nonNullNaN"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "nulls the top level if non-nullable field resolves infinity" in check( new NullingSubject, "query Q { nonNullInf }", Map( - "data" → null, - "errors" → List(Map("message" → "Cannot return null for non-nullable type", "path" → List("nonNullInf"), "locations" → List(Map("line" → 1, "column" → 11)))))) + "data" -> null, + "errors" -> List(Map("message" -> "Cannot return null for non-nullable type", "path" -> List("nonNullInf"), "locations" -> List(Map("line" -> 1, "column" -> 11)))))) "treats infinity as `null`" in check( new NullingSubject, "query Q { Inf }", - Map("data" → Map("Inf" → null))) + Map("data" -> Map("Inf" -> null))) "treats NaN as `null`" in check( new NullingSubject, "query Q { NaN }", - Map("data" → Map("NaN" → null))) + Map("data" -> Map("NaN" -> null))) "Handles non-null argument" should { val CannotBeNullArg = Argument("cannotBeNull", StringType) val schemaWithNonNullArg = Schema(ObjectType("Query", fields[Unit, Unit]( Field("withNonNullArg", OptionType(StringType), arguments = CannotBeNullArg :: Nil, - resolve = c ⇒ s"Passed: ${c arg CannotBeNullArg}")))) + resolve = c => s"Passed: ${c arg CannotBeNullArg}")))) "succeeds when passed non-null literal value" in SimpleGraphQlSupport.check(schemaWithNonNullArg, (), """ @@ -558,8 +558,8 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G withNonNullArg (cannotBeNull: $testVar) } """, - args = JsObject("testVar" → JsString("variable value")), - expected = Map("data" → Map("withNonNullArg" → "Passed: variable 
value"))) + args = JsObject("testVar" -> JsString("variable value")), + expected = Map("data" -> Map("withNonNullArg" -> "Passed: variable value"))) "succeeds when missing variable has default value" in SimpleGraphQlSupport.check(schemaWithNonNullArg, (), """ @@ -568,7 +568,7 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, args = JsObject(/* Intentionally missing variable */), - expected = Map("data" → Map("withNonNullArg" → "Passed: default value"))) + expected = Map("data" -> Map("withNonNullArg" -> "Passed: default value"))) // Note: validation should identify this issue first (missing args rule) // however execution should still protect against this. @@ -579,12 +579,12 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("withNonNullArg" → null), - "errors" → Vector( + "data" -> Map("withNonNullArg" -> null), + "errors" -> Vector( Map( - "message" → "Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'.", - "locations" → Vector(Map("line" → 3, "column" → 13)), - "path" → Vector("withNonNullArg")))), + "message" -> "Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'.", + "locations" -> Vector(Map("line" -> 3, "column" -> 13)), + "path" -> Vector("withNonNullArg")))), validateQuery = false) // Note: validation should identify this issue first (values of correct @@ -596,14 +596,14 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G } """, Map( - "data" → Map("withNonNullArg" → null), - "errors" → Vector( + "data" -> Map("withNonNullArg" -> null), + "errors" -> Vector( Map( - "message" → "Argument 'cannotBeNull' has wrong value: Null value was provided for the NotNull Type 'String!' 
at path 'cannotBeNull'..", - "path" → Vector("withNonNullArg"), - "locations" → Vector( - Map("line" → 3, "column" → 13), - Map("line" → 3, "column" → 42))))), + "message" -> "Argument 'cannotBeNull' has wrong value: Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'..", + "path" -> Vector("withNonNullArg"), + "locations" -> Vector( + Map("line" -> 3, "column" -> 13), + Map("line" -> 3, "column" -> 42))))), validateQuery = false) // Note: validation should identify this issue first (variables in allowed @@ -616,12 +616,12 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G """, args = JsObject(/* Intentionally missing variable */), expected = Map( - "data" → Map("withNonNullArg" → null), - "errors" → Vector( + "data" -> Map("withNonNullArg" -> null), + "errors" -> Vector( Map( - "message" → "Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'.", - "path" → Vector("withNonNullArg"), - "locations" → Vector(Map("line" → 3, "column" → 13))))), + "message" -> "Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'.", + "path" -> Vector("withNonNullArg"), + "locations" -> Vector(Map("line" -> 3, "column" -> 13))))), validateQuery = false) "field error when non-null arg provided variable with explicit null value" in SimpleGraphQlSupport.check(schemaWithNonNullArg, (), @@ -630,14 +630,14 @@ class NotNullSpec extends WordSpec with Matchers with FutureResultSupport with G withNonNullArg (cannotBeNull: $testVar) } """, - args = JsObject("testVar" → JsNull), + args = JsObject("testVar" -> JsNull), expected = Map( - "data" → Map("withNonNullArg" → null), - "errors" → Vector( + "data" -> Map("withNonNullArg" -> null), + "errors" -> Vector( Map( - "message" → "Null value was provided for the NotNull Type 'String!' 
at path 'cannotBeNull'.", - "path" → Vector("withNonNullArg"), - "locations" → Vector(Map("line" → 3, "column" → 13)))))) + "message" -> "Null value was provided for the NotNull Type 'String!' at path 'cannotBeNull'.", + "path" -> Vector("withNonNullArg"), + "locations" -> Vector(Map("line" -> 3, "column" -> 13)))))) } } } diff --git a/src/test/scala/sangria/execution/ProjectorSpec.scala b/src/test/scala/sangria/execution/ProjectorSpec.scala index 5add9bd1..18d5c1b4 100644 --- a/src/test/scala/sangria/execution/ProjectorSpec.scala +++ b/src/test/scala/sangria/execution/ProjectorSpec.scala @@ -21,24 +21,24 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { val StringAttributeType = ObjectType("StringAttribute", interfaces[Unit, (String, Any)](ProductAttributeType), fields[Unit, (String, Any)]( Field("name", StringType, resolve = _.value._1), - Field("strValue", StringType, resolve = _.value._2.asInstanceOf[String]))).withInstanceCheck((v, _, _) ⇒ v.asInstanceOf[(String, Any)]._2.isInstanceOf[String]) + Field("strValue", StringType, resolve = _.value._2.asInstanceOf[String]))).withInstanceCheck((v, _, _) => v.asInstanceOf[(String, Any)]._2.isInstanceOf[String]) val IntAttributeType = ObjectType("IntAttribute", interfaces[Unit, (String, Any)](ProductAttributeType), fields[Unit, (String, Any)]( Field("name", StringType, resolve = _.value._1), - Field("intValue", IntType, resolve = _.value._2.asInstanceOf[Int]))).withInstanceCheck((v, _, _) ⇒ v.asInstanceOf[(String, Any)]._2.isInstanceOf[Int]) + Field("intValue", IntType, resolve = _.value._2.asInstanceOf[Int]))).withInstanceCheck((v, _, _) => v.asInstanceOf[(String, Any)]._2.isInstanceOf[Int]) - val VariantType = ObjectType("Variant", () ⇒ fields[Unit, Variant]( + val VariantType = ObjectType("Variant", () => fields[Unit, Variant]( Field("id", IDType, resolve = _.value.id), - Field("attributes", ListType(ProductAttributeType), resolve = _ ⇒ List("foo" → "hello", "bar" → 123)), + 
Field("attributes", ListType(ProductAttributeType), resolve = _ => List("foo" -> "hello", "bar" -> 123)), Field("mixed", StringType, tags = ProjectionName("mixed1") :: ProjectionName("mixed2") :: Nil, resolve = _.value.id), - Field("typeId", StringType, tags = ProjectionExclude :: Nil, resolve = _ ⇒ "variant"), + Field("typeId", StringType, tags = ProjectionExclude :: Nil, resolve = _ => "variant"), Field("relatedProducts", ListType(ProductType), tags = ProjectionName("rp") :: Nil, - resolve = Projector(1, (ctx, projected) ⇒ projected match { - case Vector(ProjectedName("id", _)) ⇒ Value(ctx.value.relatedProductIds map (Left(_))) - case _ ⇒ ProductDefer(ctx.value.relatedProductIds) + resolve = Projector(1, (ctx, projected) => projected match { + case Vector(ProjectedName("id", _)) => Value(ctx.value.relatedProductIds map (Left(_))) + case _ => ProductDefer(ctx.value.relatedProductIds) })) )) @@ -47,8 +47,8 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { Field("id", IDType, resolve = _.value.fold(identity, _.id)), Field("variantIds", ListType(IDType), tags = ProjectionName("masterVariant.id") :: ProjectionName("variants.id") :: Nil, - resolve = _ ⇒ Nil), - Field("typeId", StringType, tags = ProjectionExclude :: Nil, resolve = _ ⇒ "product"), + resolve = _ => Nil), + Field("typeId", StringType, tags = ProjectionExclude :: Nil, resolve = _ => "product"), Field("masterVariant", VariantType, tags = ProjectionName("master1") :: ProjectionName("master2") :: Nil, resolve = _.value.right.get.variants.head), @@ -57,11 +57,11 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { val QueryType = ObjectType("Query", fields[Ctx, Unit]( Field("products", ListType(ProductType), resolve = _.ctx.products map (Right(_))), - Field("projectAll", ListType(ProductType), resolve = Projector((ctx, proj) ⇒ { + Field("projectAll", ListType(ProductType), resolve = Projector((ctx, proj) => { ctx.ctx.allProjections = proj 
ctx.ctx.products map (Right(_)) })), - Field("projectOne", ListType(ProductType), resolve = Projector(1, (ctx, proj) ⇒ { + Field("projectOne", ListType(ProductType), resolve = Projector(1, (ctx, proj) => { ctx.ctx.oneLevelprojections = proj ctx.ctx.products map (Right(_)) })) @@ -90,8 +90,8 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { class ProductResolver extends DeferredResolver[WithProducts] { override def resolve(deferred: Vector[Deferred[Any]], ctx: WithProducts, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case ProductDefer(ids) ⇒ - Future.fromTry(Try(ids map (id ⇒ Right(ctx.products.find(_.id == id).get)))) + case ProductDefer(ids) => + Future.fromTry(Try(ids map (id => Right(ctx.products.find(_.id == id).get)))) } } @@ -135,47 +135,47 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { val ctx = new Ctx Executor.execute(schema, query, ctx, deferredResolver = new ProductResolver).await should be ( - Map("data" → + Map("data" -> Map( - "projectAll" → + "projectAll" -> List( Map( - "id" → "1", - "typeId" → "product", - "variants" → List( + "id" -> "1", + "typeId" -> "product", + "variants" -> List( Map( - "id" → "2", - "attributes" → Vector( - Map("name" → "foo", "strValue" → "hello"), - Map("name" → "bar", "intValue" → 123)), - "typeId" → "variant", - "relatedProducts" → List( + "id" -> "2", + "attributes" -> Vector( + Map("name" -> "foo", "strValue" -> "hello"), + Map("name" -> "bar", "intValue" -> 123)), + "typeId" -> "variant", + "relatedProducts" -> List( Map( - "id" → "1", - "typeId" → "product", - "variants" → List( - Map("id" → "2"))), + "id" -> "1", + "typeId" -> "product", + "variants" -> List( + Map("id" -> "2"))), Map( - "id" → "2", - "typeId" → "product", - "variants" → Nil))))), + "id" -> "2", + "typeId" -> "product", + "variants" -> Nil))))), Map( - "id" → "2", - "typeId" → "product", - "variants" → Nil)), - "projectOne" → + "id" -> "2", + "typeId" -> 
"product", + "variants" -> Nil)), + "projectOne" -> List( Map( - "id" → "1", - "typeId" → "product", - "variants" → List( + "id" -> "1", + "typeId" -> "product", + "variants" -> List( Map( - "id" → "2", - "typeId" → "variant"))), + "id" -> "2", + "typeId" -> "variant"))), Map( - "id" → "2", - "typeId" → "product", - "variants" → Nil))))) + "id" -> "2", + "typeId" -> "product", + "variants" -> Nil))))) ctx.allProjections should be ( Vector( @@ -230,30 +230,30 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { val ctx = new Ctx Executor.execute(schema, query, ctx, deferredResolver = new ProductResolver).await should be ( - Map("data" → + Map("data" -> Map( - "projectAll" → Vector( + "projectAll" -> Vector( Map( - "id" → "1", - "variantIds" → Nil, - "masterVariant" → Map("mixed" → "1"), - "variants" → Vector(Map("id" → "2", "mixed" → "2"))), + "id" -> "1", + "variantIds" -> Nil, + "masterVariant" -> Map("mixed" -> "1"), + "variants" -> Vector(Map("id" -> "2", "mixed" -> "2"))), Map( - "id" → "2", - "variantIds" → Nil, - "masterVariant" → Map("mixed" → "1"), - "variants" → Nil)), - "projectOne" → Vector( + "id" -> "2", + "variantIds" -> Nil, + "masterVariant" -> Map("mixed" -> "1"), + "variants" -> Nil)), + "projectOne" -> Vector( Map( - "id" → "1", - "variantIds" → Nil, - "masterVariant" → Map("mixed" → "1"), - "variants" → Vector(Map("id" → "2", "mixed" → "2"))), + "id" -> "1", + "variantIds" -> Nil, + "masterVariant" -> Map("mixed" -> "1"), + "variants" -> Vector(Map("id" -> "2", "mixed" -> "2"))), Map( - "id" → "2", - "variantIds" → Nil, - "masterVariant" → Map("mixed" → "1"), - "variants" → Nil))))) + "id" -> "2", + "variantIds" -> Nil, + "masterVariant" -> Map("mixed" -> "1"), + "variants" -> Nil))))) ctx.allProjections should be ( Vector( @@ -282,4 +282,4 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { ProjectedName("variants", Vector.empty))) } } -} \ No newline at end of file +} diff --git 
a/src/test/scala/sangria/execution/QueryReducerSpec.scala b/src/test/scala/sangria/execution/QueryReducerSpec.scala index c720b3cd..6eee64a2 100644 --- a/src/test/scala/sangria/execution/QueryReducerSpec.scala +++ b/src/test/scala/sangria/execution/QueryReducerSpec.scala @@ -23,12 +23,12 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { complexity = 2.5D, coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case s: String => Right(s) + case _ => Left(StringCoercionViolation) }, coerceInput = { - case ast.StringValue(id, _, _, _, _) ⇒ Right(id) - case _ ⇒ Left(StringCoercionViolation) + case ast.StringValue(id, _, _, _, _) => Right(id) + case _ => Left(StringCoercionViolation) }) trait Named { @@ -42,12 +42,12 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { val NamedType = InterfaceType("Named", fields[Any, Named]( Field("name", OptionType(StringType), - complexity = Some((_, _, _) ⇒ 10D), + complexity = Some((_, _, _) => 10D), resolve = _.value.name))) val DogType = ObjectType("Dog", interfaces[Any, Dog](NamedType), fields[Any, Dog]( Field("barks", OptionType(BooleanType), - complexity = Some((_, _, _) ⇒ 1.2D), + complexity = Some((_, _, _) => 1.2D), resolve = _.value.barks))) val CatType = ObjectType("Cat", interfaces[Any, Cat](NamedType), fields[Any, Cat]( @@ -55,44 +55,44 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { val PetType = UnionType[Any]("Pet", types = DogType :: CatType :: Nil) - lazy val TestType: ObjectType[Info, Unit] = ObjectType("Test", () ⇒ fields[Info, Unit]( - Field("scalar", StringType, resolve = _ ⇒ "tests"), + lazy val TestType: ObjectType[Info, Unit] = ObjectType("Test", () => fields[Info, Unit]( + Field("scalar", StringType, resolve = _ => "tests"), Field("scalarCustom", StringType, - complexity = Some((_, _, c) ⇒ 3.0D + c), - resolve = _ ⇒ "testsc"), + complexity = 
Some((_, _, c) => 3.0D + c), + resolve = _ => "testsc"), Field("scalarArgs", StringType, arguments = Argument("foo", StringType) :: Nil, - resolve = _ ⇒ "testsa"), - Field("complexScalar", TestScalar, resolve = _ ⇒ "testcs"), + resolve = _ => "testsa"), + Field("complexScalar", TestScalar, resolve = _ => "testcs"), Field("nestList", ListType(TestType), arguments = Argument("size", IntType) :: Nil, - complexity = Some((_, args, c) ⇒ 1.1D + args.arg[Int]("size") * c), - resolve = ctx ⇒ (1 to ctx.arg[Int]("size")) map (_ ⇒ ())), - Field("nest", TestType, resolve = _ ⇒ ()), + complexity = Some((_, args, c) => 1.1D + args.arg[Int]("size") * c), + resolve = ctx => (1 to ctx.arg[Int]("size")) map (_ => ())), + Field("nest", TestType, resolve = _ => ()), Field("named", OptionType(ListType(NamedType)), arguments = Argument("size", IntType) :: Nil, - complexity = Some((_, args, c) ⇒ 4.0D + args.arg[Int]("size") * c), - resolve = _ ⇒ List(Dog(Some("Bob"), Some(true)), Cat(Some("Apples"), Some(true)))), + complexity = Some((_, args, c) => 4.0D + args.arg[Int]("size") * c), + resolve = _ => List(Dog(Some("Bob"), Some(true)), Cat(Some("Apples"), Some(true)))), Field("pets", OptionType(ListType(PetType)), arguments = Argument("size", IntType) :: Nil, - complexity = Some((_, args, c) ⇒ 3.5D + args.arg[Int]("size") * c), - resolve = _ ⇒ List(Dog(Some("Bob"), Some(true)), Cat(Some("Apples"), Some(true)))), + complexity = Some((_, args, c) => 3.5D + args.arg[Int]("size") * c), + resolve = _ => List(Dog(Some("Bob"), Some(true)), Cat(Some("Apples"), Some(true)))), Field("a", StringType, tags = ATag(1) :: Nil, - resolve = _ ⇒ "testa"), + resolve = _ => "testa"), Field("b", StringType, tags = BTag :: Nil, - resolve = _ ⇒ "testb"), + resolve = _ => "testb"), Field("ab", StringType, tags = ATag(2) :: BTag :: Nil, - resolve = _ ⇒ "testab"), + resolve = _ => "testab"), Field("info", ListType(IntType), resolve = _.ctx.nums) )) val schema = Schema(TestType) val exceptionHandler = 
ExceptionHandler { - case (m, e: IllegalArgumentException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalArgumentException) => HandledException(e.getMessage) } "MeasureComplexity" should { @@ -117,28 +117,28 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "scalar" → "tests", - "nestList" → List( + "scalar" -> "tests", + "nestList" -> List( Map( - "complexScalar" → "testcs", - "nest" → Map("cc" → "testsc", "dd" → "testsc"), - "foo" → Map("cc" → "testsc", "dd" → "testsc")), + "complexScalar" -> "testcs", + "nest" -> Map("cc" -> "testsc", "dd" -> "testsc"), + "foo" -> Map("cc" -> "testsc", "dd" -> "testsc")), Map( - "complexScalar" → "testcs", - "nest" → Map("cc" → "testsc", "dd" → "testsc"), - "foo" → Map("cc" → "testsc", "dd" → "testsc")), + "complexScalar" -> "testcs", + "nest" -> Map("cc" -> "testsc", "dd" -> "testsc"), + "foo" -> Map("cc" -> "testsc", "dd" -> "testsc")), Map( - "complexScalar" → "testcs", - "nest" → Map("cc" → "testsc", "dd" → "testsc"), - "foo" → Map("cc" → "testsc", "dd" → "testsc")))))) + "complexScalar" -> "testcs", + "nest" -> Map("cc" -> "testsc", "dd" -> "testsc"), + "foo" -> Map("cc" -> "testsc", "dd" -> "testsc")))))) complexity should be (54.6D) } @@ -157,21 +157,21 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } - val vars = mapVars("size" → 3) + val vars = mapVars("size" -> 3) Executor.execute(schema, query, userContext = Info(Nil), variables = 
vars, queryReducers = complReducer :: Nil).await should be ( Map( - "data" → Map( - "scalar" → "tests", - "nestList" → Vector( - Map("complexScalar" → "testcs"), - Map("complexScalar" → "testcs"), - Map("complexScalar" → "testcs"))))) + "data" -> Map( + "scalar" -> "tests", + "nestList" -> Vector( + Map("complexScalar" -> "testcs"), + Map("complexScalar" -> "testcs"), + Map("complexScalar" -> "testcs"))))) complexity should be (12.6D) } @@ -209,16 +209,16 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: Nil).await should be ( - Map("data" → Map( - "scalarArgs" → "testsa", - "baz" → "testsa", - "test2" → "tests", "nest" → Map()))) + Map("data" -> Map( + "scalarArgs" -> "testsa", + "baz" -> "testsa", + "test2" -> "tests", "nest" -> Map()))) complexity should be (4.0D) } @@ -256,7 +256,7 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } @@ -300,23 +300,23 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "n1" → List( - Map("name" → "Bob"), - Map("name" → "Apples")), - "n2" → List( - Map("name" → "Bob"), - Map("name" → "Apples", "meows" → true)), - "named" → List( - Map("name" → "Bob", 
"barks" → true), - Map("name" → "Apples", "meows" → true))))) + "n1" -> List( + Map("name" -> "Bob"), + Map("name" -> "Apples")), + "n2" -> List( + Map("name" -> "Bob"), + Map("name" -> "Apples", "meows" -> true)), + "named" -> List( + Map("name" -> "Bob", "barks" -> true), + Map("name" -> "Apples", "meows" -> true))))) complexity should be (189.8D) } @@ -357,23 +357,23 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "p1" → List( - Map("name" → "Bob"), - Map("name" → "Apples")), - "p2" → List( + "p1" -> List( + Map("name" -> "Bob"), + Map("name" -> "Apples")), + "p2" -> List( Map(), - Map("name" → "Apples", "meows" → true)), - "pets" → List( - Map("name" → "Bob", "barks" → true), - Map("name" → "Apples", "meows" → true))))) + Map("name" -> "Apples", "meows" -> true)), + "pets" -> List( + Map("name" -> "Bob", "barks" -> true), + Map("name" -> "Apples", "meows" -> true))))) complexity should be (188.3D) } @@ -399,17 +399,17 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "pets" → List( - Map("name" → "Bob", "barks" → true), - Map("name" → "Apples", "meows" → true))))) + "pets" -> List( + Map("name" -> "Bob", "barks" -> true), + Map("name" -> "Apples", "meows" -> true))))) complexity should be (115.5D) } @@ -426,7 +426,7 @@ class 
QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val rejectComplexQuery = QueryReducer.rejectComplexQueries[Info](14, (c, _) ⇒ + val rejectComplexQuery = QueryReducer.rejectComplexQueries[Info](14, (c, _) => new IllegalArgumentException(s"Too complex query: max allowed complexity is 14.0, but got $c")) val error = intercept [QueryReducingError] (Executor.execute(schema, query, @@ -451,23 +451,23 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num} ((nums, ctx) => ctx.copy(nums = nums)) Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: tagColl :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "info" → List(1), - "a" → "testa", - "nest" → - Map("b" → "testb")))) + "info" -> List(1), + "a" -> "testa", + "nest" -> + Map("b" -> "testb")))) complexity should be (4D) } @@ -485,21 +485,21 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num + 123} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num + 123} ((nums, ctx) => Future.successful(ctx.copy(nums = nums))) Executor.execute(schema, query, userContext = Info(Nil), queryReducers = tagColl :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "info" → List(124, 124, 125), - "a" → "testa", - "nest" → + "info" -> List(124, 124, 125), + "a" -> "testa", + "nest" -> Map( - "a" → "testa", - "b" → "testb", - "ab" → "testab")))) + "a" -> "testa", + "b" -> "testb", + "ab" -> "testab")))) } "collect mapped tag 
values and update a user context using `TryValue`" in { @@ -515,21 +515,21 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num + 123} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num + 123} ((nums, ctx) => Success(ctx.copy(nums = nums))) Executor.execute(schema, query, userContext = Info(Nil), queryReducers = tagColl :: Nil).await should be ( - Map("data" → + Map("data" -> Map( - "info" → List(124, 124, 125), - "a" → "testa", - "nest" → + "info" -> List(124, 124, 125), + "a" -> "testa", + "nest" -> Map( - "a" → "testa", - "b" → "testb", - "ab" → "testab")))) + "a" -> "testa", + "b" -> "testb", + "ab" -> "testab")))) } "handle thrown exceptions correctly" in { @@ -545,14 +545,14 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num + 123} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num + 123} ((nums, ctx) => throw new IllegalArgumentException("boom!")) Executor.execute(schema, query, userContext = Info(Nil), exceptionHandler = exceptionHandler, queryReducers = tagColl :: Nil).awaitAndRecoverQueryAnalysisScala should be ( - Map("data" → null, "errors" → List(Map("message" → "boom!")))) + Map("data" -> null, "errors" -> List(Map("message" -> "boom!")))) } "handle `TryValue` exceptions correctly" in { @@ -568,14 +568,14 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num + 123} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num + 123} ((nums, ctx) => Failure(new IllegalArgumentException("boom!"))) Executor.execute(schema, query, userContext = Info(Nil), exceptionHandler = exceptionHandler, queryReducers = tagColl :: 
Nil).awaitAndRecoverQueryAnalysisScala should be ( - Map("data" → null, "errors" → List(Map("message" → "boom!")))) + Map("data" -> null, "errors" -> List(Map("message" -> "boom!")))) } "handle `FutureValue` exceptions correctly" in { @@ -591,14 +591,14 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { } """) - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num + 123} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num + 123} ((nums, ctx) => Future.failed(new IllegalArgumentException("boom!"))) Executor.execute(schema, query, userContext = Info(Nil), exceptionHandler = exceptionHandler, queryReducers = tagColl :: Nil).awaitAndRecoverQueryAnalysisScala should be ( - Map("data" → null, "errors" → List(Map("message" → "boom!")))) + Map("data" -> null, "errors" -> List(Map("message" -> "boom!")))) } "collect all mapped tag values and update a user context" in { @@ -615,25 +615,25 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { var complexity = 0.0D - val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) ⇒ + val complReducer = QueryReducer.measureComplexity[Info] { (c, ctx) => complexity = c ctx } - val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) ⇒ num} ((nums, ctx) ⇒ + val tagColl = QueryReducer.collectTags[Info, Int] {case ATag(num) => num} ((nums, ctx) => ctx.copy(nums = nums)) Executor.execute(schema, query, userContext = Info(Nil), queryReducers = complReducer :: tagColl :: Nil).await - Map("data" → + Map("data" -> Map( - "info" → List(1, 2), - "a" → "testa", - "nest" → + "info" -> List(1, 2), + "a" -> "testa", + "nest" -> Map( - "b" → "testb", - "ab" → "testab"))) + "b" -> "testb", + "ab" -> "testab"))) complexity should be (5D) } @@ -644,7 +644,7 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { val Success(query) = QueryParser.parse(queryStr) val depth = AtomicInt(0) - 
val reducer = QueryReducer.measureDepth[Any]((d, ctx) ⇒ { + val reducer = QueryReducer.measureDepth[Any]((d, ctx) => { depth.set(d) ctx }) diff --git a/src/test/scala/sangria/execution/ScalarAliasSpec.scala b/src/test/scala/sangria/execution/ScalarAliasSpec.scala index b07a0c26..e95a3d4f 100644 --- a/src/test/scala/sangria/execution/ScalarAliasSpec.scala +++ b/src/test/scala/sangria/execution/ScalarAliasSpec.scala @@ -24,17 +24,17 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { case class RefineViolation(error: String) extends ValueCoercionViolation(error) implicit val UserIdType = ScalarAlias[UserId, String]( - StringType, _.id, id ⇒ Right(UserId(id))) + StringType, _.id, id => Right(UserId(id))) implicit val PositiveIntType = ScalarAlias[Int Refined Positive, Int]( - IntType, _.value, i ⇒ refineV[Positive](i).left.map(RefineViolation)) + IntType, _.value, i => refineV[Positive](i).left.map(RefineViolation)) case object IDViolation extends ValueCoercionViolation("Invalid ID") val UUIDType = ScalarAlias[UUID, String](StringType, toScalar = _.toString, - fromScalar = idString ⇒ try Right(UUID.fromString(idString)) catch { - case _: IllegalArgumentException ⇒ Left(IDViolation) + fromScalar = idString => try Right(UUID.fromString(idString)) catch { + case _: IllegalArgumentException => Left(IDViolation) }) val UserType = deriveObjectType[Unit, User]() @@ -56,10 +56,10 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { Field("user", UserType, arguments = UserIdArg :: NumArg :: ComplexArg :: Nil, resolve = _.withArgs(UserIdArg, NumArg, ComplexArg)( - (userId, num, complex) ⇒ User(userId, complex("userId").asInstanceOf[Option[UserId]], "generated", num))), + (userId, num, complex) => User(userId, complex("userId").asInstanceOf[Option[UserId]], "generated", num))), Field("idTest", UUIDType, arguments = UUIDArg :: Nil, - resolve = c ⇒ { + resolve = c => { val uuid: UUID = c.arg(UUIDArg) uuid }) @@ -94,41 
+94,41 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { Executor.execute(schema, query).await should be ( Map( - "data" → Map( - "user" → Map( - "id" → "1234", - "id2" → "5678", - "name" → "generated", - "num" → 42), - "__type" → Map( - "name" → "User", - "fields" → Vector( + "data" -> Map( + "user" -> Map( + "id" -> "1234", + "id2" -> "5678", + "name" -> "generated", + "num" -> 42), + "__type" -> Map( + "name" -> "User", + "fields" -> Vector( Map( - "name" → "id", - "type" → Map( - "kind" → "NON_NULL", - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String"))), + "name" -> "id", + "type" -> Map( + "kind" -> "NON_NULL", + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String"))), Map( - "name" → "id2", - "type" → Map( - "kind" → "SCALAR", - "ofType" → null)), + "name" -> "id2", + "type" -> Map( + "kind" -> "SCALAR", + "ofType" -> null)), Map( - "name" → "name", - "type" → Map( - "kind" → "NON_NULL", - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String"))), + "name" -> "name", + "type" -> Map( + "kind" -> "NON_NULL", + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String"))), Map( - "name" → "num", - "type" → Map( - "kind" → "NON_NULL", - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Int")))))))) + "name" -> "num", + "type" -> Map( + "kind" -> "NON_NULL", + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Int")))))))) } "represent correct transforms UUID values coming from variables (also after AST-based schema extention)" in { @@ -141,19 +141,19 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { val schema = Schema(ObjectType("Query", fields[Unit, Unit]( Field("idTest", UUIDType, arguments = UUIDArg :: Nil, - resolve = c ⇒ { + resolve = c => { val uuid: UUID = c.arg(UUIDArg) uuid }), Field("cidTest", OptionType(ListType(ListType(OptionType(UUIDType)))), arguments = ComplexUUIDArg :: Nil, - resolve = c ⇒ { + resolve = c => { val cuuid: Option[Seq[Seq[Option[UUID]]]] = 
c.arg(ComplexUUIDArg) cuuid }), Field("inpTest", StringType, arguments = InpArg :: Nil, - resolve = c ⇒ { + resolve = c => { val inp = c.arg(InpArg) val id: UUID = inp("id").asInstanceOf[UUID] @@ -178,11 +178,11 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { """ val vars = Map( - "id" → "f28efae0-8808-4514-b356-02808d4e936c", - "inp" → Map( - "id" → "9454d352-2ce5-11e7-93ae-92361f002671", - "id1" → "9454d5e6-2ce5-11e7-93ae-92361f002671"), - "ids" → Seq(Seq("9153a6c1-fb4b-4d69-b9aa-ee95765cf093", null, "1a1e42c3-b79b-4dbb-ad89-4ee223ffb6be"), Seq(null, "4e4548b0-87db-49b6-a764-2d84c2322fb7"))) + "id" -> "f28efae0-8808-4514-b356-02808d4e936c", + "inp" -> Map( + "id" -> "9454d352-2ce5-11e7-93ae-92361f002671", + "id1" -> "9454d5e6-2ce5-11e7-93ae-92361f002671"), + "ids" -> Seq(Seq("9153a6c1-fb4b-4d69-b9aa-ee95765cf093", null, "1a1e42c3-b79b-4dbb-ad89-4ee223ffb6be"), Seq(null, "4e4548b0-87db-49b6-a764-2d84c2322fb7"))) val schema1 = schema.extend(gql""" @@ -191,24 +191,24 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { } """) - Seq(schema, schema1) foreach { s ⇒ + Seq(schema, schema1) foreach { s => Executor.execute(s, query, variables = scalaInput(vars)).await should be ( Map( - "data" → Map( - "i1" → "f28efae0-8808-4514-b356-02808d4e936c", - "i2" → "9454db18-2ce5-11e7-93ae-92361f002671", - "inp1" → "f28efae0-8808-4514-b356-02808d4e936c/9454db18-2ce5-11e7-93ae-92361f002671", - "inp2" → "9454d352-2ce5-11e7-93ae-92361f002671/9454d5e6-2ce5-11e7-93ae-92361f002671", - "ci1" → Vector( + "data" -> Map( + "i1" -> "f28efae0-8808-4514-b356-02808d4e936c", + "i2" -> "9454db18-2ce5-11e7-93ae-92361f002671", + "inp1" -> "f28efae0-8808-4514-b356-02808d4e936c/9454db18-2ce5-11e7-93ae-92361f002671", + "inp2" -> "9454d352-2ce5-11e7-93ae-92361f002671/9454d5e6-2ce5-11e7-93ae-92361f002671", + "ci1" -> Vector( Vector("ad6a2dd9-ccd0-44dc-86d2-80cf945cb16e", "6297bd9e-2647-4770-a791-5c3f44bc56ee"), Vector(null, 
"a9525f38-380b-4226-a362-471ece962f06")), - "ci2" → Vector( + "ci2" -> Vector( Vector("f28efae0-8808-4514-b356-02808d4e936c", null, "9454db18-2ce5-11e7-93ae-92361f002671"), Vector("dd96051e-21c5-468b-ad93-43241acd9540")), - "ci3" → Vector( + "ci3" -> Vector( Vector("9153a6c1-fb4b-4d69-b9aa-ee95765cf093", null, "1a1e42c3-b79b-4dbb-ad89-4ee223ffb6be"), Vector(null, "4e4548b0-87db-49b6-a764-2d84c2322fb7")), - "ci4" → Vector( + "ci4" -> Vector( Vector("ad6a2dd9-ccd0-44dc-86d2-80cf945cb16e", "6297bd9e-2647-4770-a791-5c3f44bc56ee"), Vector(null, "a9525f38-380b-4226-a362-471ece962f06"))))) } @@ -219,7 +219,7 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { Field("user", UserType, arguments = UserIdArg :: NumArg :: ComplexArg :: Nil, resolve = _.withArgs(UserIdArg, NumArg, ComplexArg)( - (userId, num, complex) ⇒ User(userId, complex("userId").asInstanceOf[Option[UserId]], "generated " + complex, num))) + (userId, num, complex) => User(userId, complex("userId").asInstanceOf[Option[UserId]], "generated " + complex, num))) ))) val query = @@ -235,9 +235,9 @@ class ScalarAliasSpec extends WordSpec with Matchers with FutureResultSupport { val error = intercept [ValidationError] (Executor.execute(schema, query).await) assertViolations(error.violations, - "Expected type 'Int!', found '-123'. Predicate failed: (-123 > 0)." → Seq(Pos(3, 33)), - "Expected type 'String', found '1'. String value expected" → Seq(Pos(3, 51)), - "Expected type 'Int', found '-5'. Predicate failed: (-5 > 0)." → Seq(Pos(3, 63))) + "Expected type 'Int!', found '-123'. Predicate failed: (-123 > 0)." -> Seq(Pos(3, 33)), + "Expected type 'String', found '1'. String value expected" -> Seq(Pos(3, 51)), + "Expected type 'Int', found '-5'. Predicate failed: (-5 > 0)." 
-> Seq(Pos(3, 63))) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/ScalarMiddlewareSpec.scala b/src/test/scala/sangria/execution/ScalarMiddlewareSpec.scala index 53add560..c11ce184 100644 --- a/src/test/scala/sangria/execution/ScalarMiddlewareSpec.scala +++ b/src/test/scala/sangria/execution/ScalarMiddlewareSpec.scala @@ -22,7 +22,7 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo } val EncodedIdType = ScalarAlias[String, String]( - StringType, identity, id ⇒ Right(id)) + StringType, identity, id => Right(id)) val ComplexInputType = InputObjectType("Complex", List( InputField("userId", OptionInputType(EncodedIdType)), @@ -51,15 +51,15 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo def fromScalar(value: Any, inputType: InputType[_], ctx: Ctx) = { inputType match { - case EncodedIdType ⇒ Some(ctx.decodeId(value.asInstanceOf[String])) - case _ ⇒ None + case EncodedIdType => Some(ctx.decodeId(value.asInstanceOf[String])) + case _ => None } } def toScalar(value: Any, inputType: InputType[_], ctx: Ctx) = inputType match { - case EncodedIdType ⇒ Some(ctx.encodeId(value.asInstanceOf[String])) - case _ ⇒ None + case EncodedIdType => Some(ctx.encodeId(value.asInstanceOf[String])) + case _ => None } } @@ -69,7 +69,7 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Field("test", OptionType(EncodedIdType), arguments = IdArg :: ComplexArg :: Nil, resolve = _.withArgs(IdArg, ComplexArg)( - (id, complex) ⇒ id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) + (id, complex) => id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) ))) val query = @@ -84,34 +84,34 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo val ctx = new Ctx("test-") val vars = ScalaInput.scalaInput(Map( - "id" → "test-c", - "c" → Map( - "userId" → "test-d", - "name" 
→ "bar"))) + "id" -> "test-c", + "c" -> Map( + "userId" -> "test-d", + "name" -> "bar"))) val middleware = new IdEncodingMiddleware :: Nil Executor.execute(schema, query, ctx, variables = vars, middleware = middleware).await should be ( Map( - "data" → Map( - "t1" → "test-a-b-foo", - "t2" → "test-c-d-bar", - "t3" → null), - "errors" → Vector( + "data" -> Map( + "t1" -> "test-a-b-foo", + "t2" -> "test-c-d-bar", + "t3" -> null), + "errors" -> Vector( Map( - "message" → "Argument 'id' has wrong value: invalid id. (line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 5, column 26):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^", - "path" → Vector( + "message" -> "Argument 'id' has wrong value: invalid id. (line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 5, column 26):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^", + "path" -> Vector( "t3"), - "locations" → Vector( - Map("line" → 5, "column" → 13), - Map("line" → 5, "column" → 26))), + "locations" -> Vector( + Map("line" -> 5, "column" -> 13), + Map("line" -> 5, "column" -> 26))), Map( - "message" → "Field 'c.userId' has wrong value: invalid id. (line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 5, column 49):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^", - "path" → Vector( + "message" -> "Field 'c.userId' has wrong value: invalid id. 
(line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 5, column 49):\n t3: test(id: \"invalid\", c: {userId: \"yay\", name: \"foo\"})\n ^", + "path" -> Vector( "t3"), - "locations" → Vector( - Map("line" → 5, "column" → 13), - Map("line" → 5, "column" → 49)))))) + "locations" -> Vector( + Map("line" -> 5, "column" -> 13), + Map("line" -> 5, "column" -> 49)))))) } "encode and decode scalar value when argument has default value" in { @@ -119,7 +119,7 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Field("test", OptionType(EncodedIdType), arguments = IdArgWithDefault :: ComplexArgWithDefault :: Nil, resolve = _.withArgs(IdArgWithDefault, ComplexArgWithDefault)( - (id, complex) ⇒ id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) + (id, complex) => id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) ))) val query = @@ -135,29 +135,29 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo val ctx = new Ctx("test-") val vars = ScalaInput.scalaInput(Map( - "id" → "test-c", - "c" → Map( - "userId" → "test-d", - "name" → "bar"))) + "id" -> "test-c", + "c" -> Map( + "userId" -> "test-d", + "name" -> "bar"))) val middleware = new IdEncodingMiddleware :: Nil Executor.execute(schema, query, ctx, variables = vars, middleware = middleware).await should be ( Map( - "data" → Map( - "t1" → "test-a-b-foo", - "t2" → "test-c-d-bar", - "t3" → null, - "t4" → null), - "errors" → Vector( + "data" -> Map( + "t1" -> "test-a-b-foo", + "t2" -> "test-c-d-bar", + "t3" -> null, + "t4" -> null), + "errors" -> Vector( Map( - "message" → "Argument 'id' has wrong value: invalid id. 
(line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"test-yay\", name: \"foo\"})\n ^\n (line 5, column 26):\n t3: test(id: \"invalid\", c: {userId: \"test-yay\", name: \"foo\"})\n ^", - "path" → Vector("t3"), - "locations" → Vector(Map("line" → 5, "column" → 13), Map("line" → 5, "column" → 26))), + "message" -> "Argument 'id' has wrong value: invalid id. (line 5, column 13):\n t3: test(id: \"invalid\", c: {userId: \"test-yay\", name: \"foo\"})\n ^\n (line 5, column 26):\n t3: test(id: \"invalid\", c: {userId: \"test-yay\", name: \"foo\"})\n ^", + "path" -> Vector("t3"), + "locations" -> Vector(Map("line" -> 5, "column" -> 13), Map("line" -> 5, "column" -> 26))), Map( - "message" → "Field 'c.userId' has wrong value: invalid id. (line 6, column 13):\n t4: test(id: \"test-valid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 6, column 52):\n t4: test(id: \"test-valid\", c: {userId: \"yay\", name: \"foo\"})\n ^", - "path" → Vector("t4"), - "locations" → Vector(Map("line" → 6, "column" → 13), Map("line" → 6, "column" → 52)))))) + "message" -> "Field 'c.userId' has wrong value: invalid id. 
(line 6, column 13):\n t4: test(id: \"test-valid\", c: {userId: \"yay\", name: \"foo\"})\n ^\n (line 6, column 52):\n t4: test(id: \"test-valid\", c: {userId: \"yay\", name: \"foo\"})\n ^", + "path" -> Vector("t4"), + "locations" -> Vector(Map("line" -> 6, "column" -> 13), Map("line" -> 6, "column" -> 52)))))) } "applies to valid default values" in { @@ -165,7 +165,7 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Field("test", OptionType(EncodedIdType), arguments = IdArgWithValidDefault :: ComplexArgWithValidDefault :: Nil, resolve = _.withArgs(IdArgWithValidDefault, ComplexArgWithValidDefault)( - (id, complex) ⇒ id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) + (id, complex) => id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) ))) val query = @@ -182,9 +182,9 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Executor.execute(schema, query, ctx, middleware = middleware).await should be ( Map( - "data" → Map( - "t1" → "test-SOME_ID-INPUT_ID-bar", - "t2" → "test-ID1-ID2-foo"))) + "data" -> Map( + "t1" -> "test-SOME_ID-INPUT_ID-bar", + "t2" -> "test-ID1-ID2-foo"))) } "applies to invalid default values" in { @@ -192,7 +192,7 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Field("test", OptionType(EncodedIdType), arguments = IdArgWithDefault :: ComplexArgWithDefault :: Nil, resolve = _.withArgs(IdArgWithDefault, ComplexArgWithDefault)( - (id, complex) ⇒ id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) + (id, complex) => id + "-" + complex("userId").asInstanceOf[Option[UserId]].get + "-" + complex("name"))) ))) val query = @@ -210,27 +210,27 @@ class ScalarMiddlewareSpec extends WordSpec with Matchers with FutureResultSuppo Executor.execute(schema, query, ctx, middleware = middleware).await should be ( Map( - "data" → Map( - "t2" → null, - "t3" → 
null, - "t4" → null), - "errors" → Vector( + "data" -> Map( + "t2" -> null, + "t3" -> null, + "t4" -> null), + "errors" -> Vector( Map( - "message" → "Field '$id' has wrong value: invalid id. (line 2, column 22):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^\n (line 2, column 36):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^", - "path" → Vector("t2"), - "locations" → Vector(Map("line" → 2, "column" → 22), Map("line" → 2, "column" → 36))), + "message" -> "Field '$id' has wrong value: invalid id. (line 2, column 22):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^\n (line 2, column 36):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^", + "path" -> Vector("t2"), + "locations" -> Vector(Map("line" -> 2, "column" -> 22), Map("line" -> 2, "column" -> 36))), Map( - "message" → "Field '$c.userId' has wrong value: invalid id. (line 2, column 43):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^\n (line 2, column 66):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^", - "path" → Vector("t2"), - "locations" → Vector(Map("line" → 2, "column" → 43), Map("line" → 2, "column" → 66))), + "message" -> "Field '$c.userId' has wrong value: invalid id. (line 2, column 43):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^\n (line 2, column 66):\n query Test($id: String = \"ID1\", $c: Complex = {userId: \"ID2\", name: \"foo\"}) {\n ^", + "path" -> Vector("t2"), + "locations" -> Vector(Map("line" -> 2, "column" -> 43), Map("line" -> 2, "column" -> 66))), Map( - "message" → "Argument 'id' has wrong value: invalid id. 
(line 4, column 13):\n t3: test(c: {userId: \"test-yay\", name: \"foo\"})\n ^", - "path" → Vector("t3"), - "locations" → Vector(Map("line" → 4, "column" → 13))), + "message" -> "Argument 'id' has wrong value: invalid id. (line 4, column 13):\n t3: test(c: {userId: \"test-yay\", name: \"foo\"})\n ^", + "path" -> Vector("t3"), + "locations" -> Vector(Map("line" -> 4, "column" -> 13))), Map( - "message" → "Argument 'c.userId' has wrong value: invalid id. (line 5, column 13):\n t4: test(id: \"test-valid\", c: {name: \"foo\"})\n ^", - "path" → Vector("t4"), - "locations" → Vector(Map("line" → 5, "column" → 13)))))) + "message" -> "Argument 'c.userId' has wrong value: invalid id. (line 5, column 13):\n t4: test(id: \"test-valid\", c: {name: \"foo\"})\n ^", + "path" -> Vector("t4"), + "locations" -> Vector(Map("line" -> 5, "column" -> 13)))))) } } } diff --git a/src/test/scala/sangria/execution/UnionInterfaceSpec.scala b/src/test/scala/sangria/execution/UnionInterfaceSpec.scala index 40754cdb..78ec5277 100644 --- a/src/test/scala/sangria/execution/UnionInterfaceSpec.scala +++ b/src/test/scala/sangria/execution/UnionInterfaceSpec.scala @@ -37,7 +37,7 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport Field("pets", OptionType(ListType(OptionType(PetType))), resolve = _.value.pets), Field("pets2", OptionType(ListType(OptionType(Pet2Type))), resolve = _.value.eitherPets), Field("favouritePet", PetType, resolve = _.value.pets.flatMap(_.headOption.flatMap(identity)).get), - Field("favouritePetList", ListType(PetType), resolve = _.value.pets.getOrElse(Nil).flatMap(x ⇒ x).toSeq), + Field("favouritePetList", ListType(PetType), resolve = _.value.pets.getOrElse(Nil).flatMap(x => x).toSeq), Field("favouritePetOpt", OptionType(PetType), resolve = _.value.pets.flatMap(_.headOption.flatMap(identity))), Field("friends", OptionType(ListType(OptionType(NamedType))), resolve = _.value.friends))) @@ -76,33 +76,33 @@ class UnionInterfaceSpec extends WordSpec 
with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "Named" → Map( - "kind" → "INTERFACE", - "name" → "Named", - "fields" → List( - Map("name" → "name") + "data" -> Map( + "Named" -> Map( + "kind" -> "INTERFACE", + "name" -> "Named", + "fields" -> List( + Map("name" -> "name") ), - "interfaces" → null, - "possibleTypes" → List( - Map("name" → "Cat"), - Map("name" → "Dog"), - Map("name" → "Person") + "interfaces" -> null, + "possibleTypes" -> List( + Map("name" -> "Cat"), + Map("name" -> "Dog"), + Map("name" -> "Person") ), - "enumValues" → null, - "inputFields" → null + "enumValues" -> null, + "inputFields" -> null ), - "Pet" → Map( - "kind" → "UNION", - "name" → "Pet", - "fields" → null, - "interfaces" → null, - "possibleTypes" → List( - Map("name" → "Dog"), - Map("name" → "Cat") + "Pet" -> Map( + "kind" -> "UNION", + "name" -> "Pet", + "fields" -> null, + "interfaces" -> null, + "possibleTypes" -> List( + Map("name" -> "Dog"), + Map("name" -> "Cat") ), - "enumValues" → null, - "inputFields" → null + "enumValues" -> null, + "inputFields" -> null ) ) ) @@ -125,14 +125,14 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - "favouritePet" → Map("name" → "Garfield"), - "favouritePetOpt" → Map("name" → "Garfield"), - "pets" → List( - Map("__typename" → "Cat", "name" → "Garfield", "meows" → false), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "favouritePet" -> Map("name" -> "Garfield"), + "favouritePetOpt" -> Map("name" -> "Garfield"), + "pets" -> List( + Map("__typename" -> "Cat", "name" -> "Garfield", "meows" -> false), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ) , @@ -162,18 +162,18 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - 
"favouritePet" → Map("name" → "Garfield"), - "favouritePetOpt" → Map("name" → "Garfield"), - "pets" → List( - Map("__typename" → "Cat", "name" → "Garfield", "meows" → false), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "favouritePet" -> Map("name" -> "Garfield"), + "favouritePetOpt" -> Map("name" -> "Garfield"), + "pets" -> List( + Map("__typename" -> "Cat", "name" -> "Garfield", "meows" -> false), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ), - "pets2" → List( - Map("__typename" → "Cat", "name" → "Garfield", "meows" → false), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "pets2" -> List( + Map("__typename" -> "Cat", "name" -> "Garfield", "meows" -> false), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ) , @@ -200,12 +200,12 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - "pets" → List( - Map("__typename" → "Cat", "name" → "Garfield", "meows" → false), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "pets" -> List( + Map("__typename" -> "Cat", "name" -> "Garfield", "meows" -> false), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ) @@ -226,12 +226,12 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - "friends" → List( - Map("__typename" → "Person", "name" → "Liz"), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "friends" -> List( + Map("__typename" -> "Person", "name" -> "Liz"), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ), @@ -257,12 +257,12 @@ class UnionInterfaceSpec extends WordSpec with Matchers with 
FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - "friends" → List( - Map("__typename" → "Person", "name" → "Liz"), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "friends" -> List( + Map("__typename" -> "Person", "name" -> "Liz"), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ) @@ -302,16 +302,16 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport } """, Map( - "data" → Map( - "__typename" → "Person", - "name" → "Bob", - "pets" → List( - Map("__typename" → "Cat", "name" → "Garfield", "meows" → false), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "data" -> Map( + "__typename" -> "Person", + "name" -> "Bob", + "pets" -> List( + Map("__typename" -> "Cat", "name" -> "Garfield", "meows" -> false), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ), - "friends" → List( - Map("__typename" → "Person", "name" → "Liz"), - Map("__typename" → "Dog", "name" → "Odie", "barks" → true) + "friends" -> List( + Map("__typename" -> "Person", "name" -> "Liz"), + Map("__typename" -> "Dog", "name" -> "Odie", "barks" -> true) ) ) ) @@ -334,9 +334,9 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport val BazType = ObjectType("Baz", fields[Unit, Baz]( Field("quz", OptionType(ListType(OptionType(QuzType))), arguments = Argument("id", OptionInputType(ListInputType(StringType))) :: Nil, - resolve = c ⇒ { + resolve = c => { c.argOpt[Seq[String]]("id") - .map(queried ⇒ c.value.quz.filter(quz ⇒ queried.contains(quz.id))) + .map(queried => c.value.quz.filter(quz => queried.contains(quz.id))) .getOrElse(c.value.quz) .map(Some(_)) }))) @@ -379,21 +379,21 @@ class UnionInterfaceSpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(schema, query, root = data).await should be ( Map( - "data" → Map( - "foo" → Vector( + "data" -> Map( + 
"foo" -> Vector( Map( - "__typename" → "Foo", - "baz" → Map( - "quz" → Vector( - Map("id" → "one")))), - Map("__typename" → "Baz"), + "__typename" -> "Foo", + "baz" -> Map( + "quz" -> Vector( + Map("id" -> "one")))), + Map("__typename" -> "Baz"), Map( - "__typename" → "Bar", - "baz" → Map( - "quz" → Vector( + "__typename" -> "Bar", + "baz" -> Map( + "quz" -> Vector( Map( - "id" → "two", - "i" → 2)))))))) + "id" -> "two", + "i" -> 2)))))))) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/ValueCoercionHelperSpec.scala b/src/test/scala/sangria/execution/ValueCoercionHelperSpec.scala index cbc5377e..f6c109f8 100644 --- a/src/test/scala/sangria/execution/ValueCoercionHelperSpec.scala +++ b/src/test/scala/sangria/execution/ValueCoercionHelperSpec.scala @@ -111,8 +111,8 @@ class ValueCoercionHelperSpec extends WordSpec with Matchers { check(opt(testInputObj), "null", Some(None)) check(opt(testInputObj), "123", None) check(opt(testInputObj), "[]", None) - check(opt(testInputObj), "{ int: 123, requiredBool: false }", Some(Some(Map("int" → Some(123), "requiredBool" → false)))) - check(opt(testInputObj), "{ bool: true, requiredBool: false }", Some(Some(Map("int" → Some(42), "bool" → Some(true), "requiredBool" → false)))) + check(opt(testInputObj), "{ int: 123, requiredBool: false }", Some(Some(Map("int" -> Some(123), "requiredBool" -> false)))) + check(opt(testInputObj), "{ bool: true, requiredBool: false }", Some(Some(Map("int" -> Some(42), "bool" -> Some(true), "requiredBool" -> false)))) check(opt(testInputObj), "{ int: true, requiredBool: true }", None) check(opt(testInputObj), "{ requiredBool: null }", None) check(opt(testInputObj), "{ bool: true }", None) @@ -120,39 +120,39 @@ class ValueCoercionHelperSpec extends WordSpec with Matchers { "accepts variable values assuming already coerced" in { check(opt(BooleanType), "$var", None) - check(opt(BooleanType), "$var", Some(Some(true)), "$var: Boolean" → """{"var": true}""") - 
check(opt(BooleanType), "$var", Some(None), "$var: Boolean" → """{"var": null}""") + check(opt(BooleanType), "$var", Some(Some(true)), "$var: Boolean" -> """{"var": true}""") + check(opt(BooleanType), "$var", Some(None), "$var: Boolean" -> """{"var": null}""") } "asserts variables are provided as items in lists" in { check(listOfBool, "[ $foo ]", Some(Some(List(None)))) check(listOfNonNullBool, "[ $foo ]", None) - check(listOfNonNullBool, "[ $foo ]", Some(Some(List(true))), "$foo: Boolean!" → """{"foo": true}""") - check(listOfNonNullBool, "$foo", Some(Some(List(true))), "$foo: [Boolean!]" → """{"foo": true}""") - check(listOfNonNullBool, "$foo", Some(Some(List(true))), "$foo: [Boolean!]" → """{"foo": [true]}""") + check(listOfNonNullBool, "[ $foo ]", Some(Some(List(true))), "$foo: Boolean!" -> """{"foo": true}""") + check(listOfNonNullBool, "$foo", Some(Some(List(true))), "$foo: [Boolean!]" -> """{"foo": true}""") + check(listOfNonNullBool, "$foo", Some(Some(List(true))), "$foo: [Boolean!]" -> """{"foo": [true]}""") } "omits input object fields for unprovided variables" in { check(opt(testInputObj), "{ int: $foo, bool: $foo, requiredBool: true }", - Some(Some(Map("int" → Some(42), "requiredBool" → true)))) + Some(Some(Map("int" -> Some(42), "requiredBool" -> true)))) check(opt(testInputObj), "{ int: $foo, bool: $foo, requiredBool: true }", - Some(Some(Map("int" → None, "bool" → None, "requiredBool" → true))), - "$foo: Boolean" → """{"foo": null}""") + Some(Some(Map("int" -> None, "bool" -> None, "requiredBool" -> true))), + "$foo: Boolean" -> """{"foo": null}""") check(opt(testInputObj), "{ requiredBool: $foo }", None) check(opt(testInputObj), "{ bool: $foo, requiredBool: $foo }", - Some(Some(Map("int" → Some(42), "bool" → Some(true), "requiredBool" → true))), - "$foo: Boolean" → """{"foo": true}""") + Some(Some(Map("int" -> Some(42), "bool" -> Some(true), "requiredBool" -> true))), + "$foo: Boolean" -> """{"foo": true}""") check(opt(testInputObj), "$foo", - 
Some(Some(Map("int" → Some(42), "requiredBool" → true))), - "$foo: TestInput" → """{"foo": {"requiredBool": true}}""") + Some(Some(Map("int" -> Some(42), "requiredBool" -> true))), + "$foo: TestInput" -> """{"foo": {"requiredBool": true}}""") check(opt(testInputObj), "$foo", - Some(Some(Map("int" → Some(42), "bool" → None, "requiredBool" → true))), - "$foo: TestInput" → """{"foo": {"bool": null, "requiredBool": true}}""") + Some(Some(Map("int" -> Some(42), "bool" -> None, "requiredBool" -> true))), + "$foo: TestInput" -> """{"foo": {"bool": null, "requiredBool": true}}""") } } @@ -181,7 +181,7 @@ class ValueCoercionHelperSpec extends WordSpec with Matchers { args.raw.get("a") } - def check[T](tpe: InputType[T], value: String, result: Any, vars: (String, String) = "" → "")(implicit fromInput: FromInput[T]) = + def check[T](tpe: InputType[T], value: String, result: Any, vars: (String, String) = "" -> "")(implicit fromInput: FromInput[T]) = coerceInputValue(tpe, value, vars) should be (result) def cls[T : ClassTag] = implicitly[ClassTag[T]].runtimeClass diff --git a/src/test/scala/sangria/execution/VariablesSpec.scala b/src/test/scala/sangria/execution/VariablesSpec.scala index 30b69311..4babf743 100644 --- a/src/test/scala/sangria/execution/VariablesSpec.scala +++ b/src/test/scala/sangria/execution/VariablesSpec.scala @@ -25,34 +25,34 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fields[Unit, Unit]( Field("fieldWithObjectInput", OptionType(StringType), arguments = Argument("input", OptionInputType(TestInputObject)) :: Nil, - resolve = ctx ⇒ ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(TestInputObject)))), + resolve = ctx => ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(TestInputObject)))), Field("fieldWithNullableStringInput", OptionType(StringType), arguments = Argument("input", OptionInputType(StringType)) :: Nil, - resolve = 
ctx ⇒ ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(StringType)))), + resolve = ctx => ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(StringType)))), Field("fieldWithNullableStringInputDefined", BooleanType, arguments = Argument("input", OptionInputType(StringType)) :: Nil, - resolve = ctx ⇒ ctx.argDefinedInQuery("input")), + resolve = ctx => ctx.argDefinedInQuery("input")), Field("fieldWithNonNullableStringInput", OptionType(StringType), arguments = Argument("input", StringType) :: Nil, - resolve = ctx ⇒ DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), StringType)), + resolve = ctx => DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), StringType)), Field("fieldWithDefaultArgumentValue", OptionType(StringType), arguments = Argument("input", OptionInputType(StringType), defaultValue = "Hello World") :: Nil, - resolve = ctx ⇒ DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), OptionInputType(StringType))), + resolve = ctx => DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), OptionInputType(StringType))), Field("fieldWithNonNullableStringInputAndDefaultArgumentValue", OptionType(StringType), arguments = Argument("input", StringType, defaultValue = "Hello World") :: Nil, - resolve = ctx ⇒ DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), OptionInputType(StringType))), + resolve = ctx => DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), OptionInputType(StringType))), Field("list", OptionType(StringType), arguments = Argument("input", OptionInputType(ListInputType(OptionInputType(StringType)))) :: Nil, - resolve = ctx ⇒ ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(ListInputType(OptionInputType(StringType)))))), + resolve = ctx => ctx.argOpt[Any]("input") map 
(DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(ListInputType(OptionInputType(StringType)))))), Field("nnList", OptionType(StringType), arguments = Argument("input", ListInputType(OptionInputType(StringType))) :: Nil, - resolve = ctx ⇒ DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), ListInputType(OptionInputType(StringType)))), + resolve = ctx => DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), ListInputType(OptionInputType(StringType)))), Field("listNN", OptionType(StringType), arguments = Argument("input", OptionInputType(ListInputType(StringType))) :: Nil, - resolve = ctx ⇒ ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(ListInputType(StringType))))), + resolve = ctx => ctx.argOpt[Any]("input") map (DefaultValueRenderer.renderCoercedInputValueCompact(_, OptionInputType(ListInputType(StringType))))), Field("nnListNN", OptionType(StringType), arguments = Argument("input", ListInputType(StringType)) :: Nil, - resolve = ctx ⇒ DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), ListInputType(StringType))) + resolve = ctx => DefaultValueRenderer.renderCoercedInputValueCompact(ctx.arg[Any]("input"), ListInputType(StringType))) ) }) @@ -67,8 +67,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: null) } """, - Map("data" → Map( - "fieldWithObjectInput" → null + Map("data" -> Map( + "fieldWithObjectInput" -> null ))) "uses undefined when variable not provided" in check((), @@ -79,9 +79,9 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, args = JsObject(/* Intentionally missing variable values. 
*/), - expected = Map("data" → Map( - "fieldWithNullableStringInput" → null, - "fieldWithNullableStringInputDefined" → false + expected = Map("data" -> Map( + "fieldWithNullableStringInput" -> null, + "fieldWithNullableStringInputDefined" -> false ))) "uses null when variable provided explicit null value" in check((), @@ -91,10 +91,10 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInputDefined(input: $input) } """, - args = JsObject("input" → JsNull), - expected = Map("data" → Map( - "fieldWithNullableStringInput" → null, - "fieldWithNullableStringInputDefined" → true + args = JsObject("input" -> JsNull), + expected = Map("data" -> Map( + "fieldWithNullableStringInput" -> null, + "fieldWithNullableStringInputDefined" -> true ))) "does not use default value when provided" in check((), @@ -103,9 +103,9 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInput(input: $input) } """, - args = JsObject("input" → JsString("Variable value")), - expected = Map("data" → Map( - "fieldWithNullableStringInput" → "\"Variable value\"" + args = JsObject("input" -> JsString("Variable value")), + expected = Map("data" -> Map( + "fieldWithNullableStringInput" -> "\"Variable value\"" ))) "uses explicit null value instead of default value" in check((), @@ -115,10 +115,10 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInputDefined(input: $input) } """, - args = JsObject("input" → JsNull), - expected = Map("data" → Map( - "fieldWithNullableStringInput" → null, - "fieldWithNullableStringInputDefined" → true + args = JsObject("input" -> JsNull), + expected = Map("data" -> Map( + "fieldWithNullableStringInput" -> null, + "fieldWithNullableStringInputDefined" -> true ))) "uses null default value when not provided" in check((), @@ -129,9 +129,9 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, args = JsObject(/* 
Intentionally missing variable values. */), - expected = Map("data" → Map( - "fieldWithNullableStringInput" → null, - "fieldWithNullableStringInputDefined" → true + expected = Map("data" -> Map( + "fieldWithNullableStringInput" -> null, + "fieldWithNullableStringInputDefined" -> true ))) "when no runtime value is provided to a non-null argument" in check((), @@ -140,8 +140,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInputAndDefaultArgumentValue(input: $optional) } """, - Map("data" → Map( - "fieldWithNonNullableStringInputAndDefaultArgumentValue" → "\"Hello World\"" + Map("data" -> Map( + "fieldWithNonNullableStringInputAndDefaultArgumentValue" -> "\"Hello World\"" ))) "executes with complex input" in check((), @@ -150,8 +150,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {a: "foo", b: ["bar"], c: "baz"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}""" + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) "executes with complex input containing nulls in object fields" in check((), @@ -160,8 +160,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {a: null, b: ["bar"], c: "baz"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:null,b:["bar"],c:"baz"}""" + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:null,b:["bar"],c:"baz"}""" ))) "executes with complex input containing nulls in list values inside of complex objects" in check((), @@ -170,8 +170,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {a: "foo", b: ["bar", null, "test"], c: "baz"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar",null,"test"],c:"baz"}""" + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar",null,"test"],c:"baz"}""" ))) "executes with 
complex input containing nulls in list values" in check((), @@ -180,8 +180,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { nnList(input: ["a1", null, "b1"]) } """, - Map("data" → Map( - "nnList" → """["a1",null,"b1"]""" + Map("data" -> Map( + "nnList" -> """["a1",null,"b1"]""" ))) "does not allow null literals in not-null lists" in checkContainsErrors( @@ -192,7 +192,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("Expected type 'String!', found 'null'." → Seq(Pos(3, 38)))) + List("Expected type 'String!', found 'null'." -> Seq(Pos(3, 38)))) "does not allow null literals in not-null fields in complex objects" in checkContainsErrors( (), @@ -202,7 +202,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("Expected type 'String!', found 'null'." → Seq(Pos(3, 57)))) + List("Expected type 'String!', found 'null'." -> Seq(Pos(3, 57)))) "does not allow null literals in not-null arguments" in checkContainsErrors( (), @@ -212,7 +212,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("Expected type '[String!]!', found 'null'." → Seq(Pos(3, 31)))) + List("Expected type '[String!]!', found 'null'." -> Seq(Pos(3, 31)))) "does not allow null literals in not-null lists inside of complex objects" in checkContainsErrors( (), @@ -222,7 +222,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("Expected type 'String!', found 'null'." → Seq(Pos(3, 74)))) + List("Expected type 'String!', found 'null'." 
-> Seq(Pos(3, 74)))) "executes with complex input containing undefined object fields" in check((), """ @@ -230,8 +230,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {b: ["bar"], c: "baz"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{b:["bar"],c:"baz"}"""))) + Map("data" -> Map( + "fieldWithObjectInput" -> """{b:["bar"],c:"baz"}"""))) "properly coerces single value to array" in check((), """ @@ -239,8 +239,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {a: "foo", b: "bar", c: "baz"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}"""))) + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}"""))) "properly parses null value to null" in check((), """ @@ -248,8 +248,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {a: null, b: null, c: "C", d: null}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:null,b:null,c:"C",d:null}"""))) + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:null,b:null,c:"C",d:null}"""))) "properly parses null value in list" in check((), """ @@ -257,8 +257,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: {b: ["A",null,"C"], c: "C"}) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{b:["A",null,"C"],c:"C"}"""))) + Map("data" -> Map( + "fieldWithObjectInput" -> """{b:["A",null,"C"],c:"C"}"""))) "does not use incorrect value" in checkContainsErrors( (), @@ -267,8 +267,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: ["foo", "bar", "baz"]) } """, - Map("fieldWithObjectInput" → null), - List("""Value '["foo","bar","baz"]' of wrong type was provided to the field of type 'TestInputObject!' 
at path 'input'.""" → List(Pos(3, 15), Pos(3, 43))), + Map("fieldWithObjectInput" -> null), + List("""Value '["foo","bar","baz"]' of wrong type was provided to the field of type 'TestInputObject!' at path 'input'.""" -> List(Pos(3, 15), Pos(3, 43))), validateQuery = false ) } @@ -282,18 +282,18 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { """) "executes with complex input (scala input)" in { - val args = Map("input" → Map("a" → "foo", "b" → List("bar"), "c" → "baz")) + val args = Map("input" -> Map("a" -> "foo", "b" -> List("bar"), "c" -> "baz")) - Executor.execute(schema, testQuery, variables = mapVars(args)).await should be (Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}""" + Executor.execute(schema, testQuery, variables = mapVars(args)).await should be (Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) } "executes with complex input (json input)" in { val args = """{"input": {"a": "foo", "b": ["bar"], "c": "baz"}}""".parseJson - Executor.execute(schema, testQuery, variables = args).await should be (Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}""" + Executor.execute(schema, testQuery, variables = args).await should be (Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) } @@ -303,23 +303,23 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithObjectInput(input: $input) } """, - Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}""" + Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) "properly coerces single value to array (scala input)" in { - val args = Map("input" → Map("a" → "foo", "b" → "bar", "c" → "baz")) + val args = Map("input" -> Map("a" -> "foo", "b" -> "bar", "c" -> "baz")) - Executor.execute(schema, testQuery, variables = mapVars(args)).await should be (Map("data" → Map( - "fieldWithObjectInput" → 
"""{a:"foo",b:["bar"],c:"baz"}""" + Executor.execute(schema, testQuery, variables = mapVars(args)).await should be (Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) } "properly coerces single value to array (json input)" in { val args = """{"input": {"a": "foo", "b": "bar", "c": "baz"}}""".parseJson - Executor.execute(schema, testQuery, variables = args).await should be (Map("data" → Map( - "fieldWithObjectInput" → """{a:"foo",b:["bar"],c:"baz"}""" + Executor.execute(schema, testQuery, variables = args).await should be (Map("data" -> Map( + "fieldWithObjectInput" -> """{a:"foo",b:["bar"],c:"baz"}""" ))) } @@ -362,8 +362,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInput } """, - Map("data" → Map( - "fieldWithNullableStringInput" → null + Map("data" -> Map( + "fieldWithNullableStringInput" -> null ))) "allows nullable inputs to be omitted in a variable" in check((), @@ -372,8 +372,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInput(input: $value) } """, - Map("data" → Map( - "fieldWithNullableStringInput" → null + Map("data" -> Map( + "fieldWithNullableStringInput" -> null ))) "allows nullable inputs to be omitted in an unlisted variable" in check((), @@ -382,14 +382,14 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInput(input: $value) } """, - Map("data" → Map( - "fieldWithNullableStringInput" → null + Map("data" -> Map( + "fieldWithNullableStringInput" -> null )), validateQuery = false ) "allows nullable inputs to be set to null in a variable" in { - val args = mapVars("value" → null) + val args = mapVars("value" -> null) val Success(query) = QueryParser.parse( """ @@ -398,13 +398,13 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """) - Executor.execute(schema, query, variables = args).await should be (Map("data" → Map( - 
"fieldWithNullableStringInput" → null + Executor.execute(schema, query, variables = args).await should be (Map("data" -> Map( + "fieldWithNullableStringInput" -> null ))) } "allows nullable inputs to be set to a value in a variable" in { - val args = mapVars("value" → "a") + val args = mapVars("value" -> "a") val Success(query) = QueryParser.parse( """ @@ -413,8 +413,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """) - Executor.execute(schema, query, variables = args).await should be (Map("data" → Map( - "fieldWithNullableStringInput" → "\"a\"" + Executor.execute(schema, query, variables = args).await should be (Map("data" -> Map( + "fieldWithNullableStringInput" -> "\"a\"" ))) } @@ -424,8 +424,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNullableStringInput(input: "a") } """, - Map("data" → Map( - "fieldWithNullableStringInput" → "\"a\"" + Map("data" -> Map( + "fieldWithNullableStringInput" -> "\"a\"" ))) } @@ -436,8 +436,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInput(input: $value) } """, - Map("data" → Map( - "fieldWithNonNullableStringInput" → "\"default\"" + Map("data" -> Map( + "fieldWithNonNullableStringInput" -> "\"default\"" ))) "does not allow non-nullable inputs to be omitted in a variable" in checkContainsErrors((), @@ -447,7 +447,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable '$value' expected value of type 'String!' but value is undefined.""" → List(Pos(2, 33)))) + List("""Variable '$value' expected value of type 'String!' but value is undefined.""" -> List(Pos(2, 33)))) "does not allow non-nullable inputs to be set to null in a variable" in checkContainsErrors((), """ @@ -456,7 +456,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable '$value' expected value of type 'String!' 
but got: null.""" → List(Pos(2, 33))), + List("""Variable '$value' expected value of type 'String!' but got: null.""" -> List(Pos(2, 33))), """{"value": null}""".parseJson ) @@ -466,8 +466,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInput(input: $value) } """, - Map("data" → Map( - "fieldWithNonNullableStringInput" → "\"a\"" + Map("data" -> Map( + "fieldWithNonNullableStringInput" -> "\"a\"" )), """{"value": "a"}""".parseJson ) @@ -478,8 +478,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInput(input: "a") } """, - Map("data" → Map( - "fieldWithNonNullableStringInput" → "\"a\"" + Map("data" -> Map( + "fieldWithNonNullableStringInput" -> "\"a\"" ))) "passes along null for non-nullable inputs if explicitly set in the query" in checkContainsErrors((), @@ -488,8 +488,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInput } """, - Map("fieldWithNonNullableStringInput" → null), - List("Null value was provided for the NotNull Type 'String!' at path 'input'." → Seq(Pos(3, 13))), + Map("fieldWithNonNullableStringInput" -> null), + List("Null value was provided for the NotNull Type 'String!' at path 'input'." -> Seq(Pos(3, 13))), validateQuery = false) // Note: this test would typically fail validation before encountering @@ -503,8 +503,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithNonNullableStringInput(input: $foo) } """, - Map("fieldWithNonNullableStringInput" → null), - List("Null value was provided for the NotNull Type 'String!' at path 'input'." → Seq(Pos(3, 13))), + Map("fieldWithNonNullableStringInput" -> null), + List("Null value was provided for the NotNull Type 'String!' at path 'input'." 
-> Seq(Pos(3, 13))), validateQuery = false) } @@ -515,7 +515,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { list(input: $input) } """, - Map("data" → Map("list" → null)), + Map("data" -> Map("list" -> null)), """{"input": null}""".parseJson ) @@ -525,7 +525,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { list(input: $input) } """, - Map("data" → Map("list" → "[\"A\"]")), + Map("data" -> Map("list" -> "[\"A\"]")), """{"input": ["A"]}""".parseJson ) @@ -535,7 +535,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { list(input: $input) } """, - Map("data" → Map("list" → "[\"A\",null,\"B\"]")), + Map("data" -> Map("list" -> "[\"A\",null,\"B\"]")), """{"input": ["A", null, "B"]}""".parseJson ) @@ -546,7 +546,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable '$input' expected value of type '[String]!' but got: null.""" → List(Pos(2, 19))), + List("""Variable '$input' expected value of type '[String]!' 
but got: null.""" -> List(Pos(2, 19))), """{"input": null}""".parseJson ) @@ -556,7 +556,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { nnList(input: $input) } """, - Map("data" → Map("nnList" → "[\"A\"]")), + Map("data" -> Map("nnList" -> "[\"A\"]")), """{"input": ["A"]}""".parseJson ) @@ -566,7 +566,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { nnList(input: $input) } """, - Map("data" → Map("nnList" → "[\"A\",null,\"B\"]")), + Map("data" -> Map("nnList" -> "[\"A\",null,\"B\"]")), """{"input": ["A",null,"B"]}""".parseJson ) @@ -576,7 +576,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { listNN(input: $input) } """, - Map("data" → Map("listNN" → null)), + Map("data" -> Map("listNN" -> null)), """{"input": null}""".parseJson ) @@ -586,7 +586,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { listNN(input: $input) } """, - Map("data" → Map("listNN" → "[\"A\"]")), + Map("data" -> Map("listNN" -> "[\"A\"]")), """{"input": ["A"]}""".parseJson ) @@ -597,7 +597,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable '$input' expected value of type '[String!]' but got: ["A",null,"B"].""" → List(Pos(2, 19))), + List("""Variable '$input' expected value of type '[String!]' but got: ["A",null,"B"].""" -> List(Pos(2, 19))), """{"input": ["A",null,"B"]}""".parseJson ) @@ -607,8 +607,8 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { nnListNN(input: $input) } """, - Map("nnListNN" → null), - List("Null value was provided for the NotNull Type '[String!]!' at path 'input'." → Seq(Pos(3, 13))), + Map("nnListNN" -> null), + List("Null value was provided for the NotNull Type '[String!]!' at path 'input'." 
-> Seq(Pos(3, 13))), """{"input": null}""".parseJson, validateQuery = false ) @@ -619,7 +619,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { nnListNN(input: $input) } """, - Map("data" → Map("nnListNN" → "[\"A\"]")), + Map("data" -> Map("nnListNN" -> "[\"A\"]")), """{"input": ["A"]}""".parseJson ) @@ -630,7 +630,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable '$input' expected value of type '[String!]!' but got: ["A",null,"B"].""" → List(Pos(2, 19))), + List("""Variable '$input' expected value of type '[String!]!' but got: ["A",null,"B"].""" -> List(Pos(2, 19))), """{"input": ["A",null,"B"]}""".parseJson ) @@ -641,7 +641,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable 'TestType!' expected value of type '$input' which cannot be used as an input type.""" → List(Pos(2, 19))), + List("""Variable 'TestType!' expected value of type '$input' which cannot be used as an input type.""" -> List(Pos(2, 19))), """{"input": ["A", "B"]}""".parseJson, validateQuery = false ) @@ -653,7 +653,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, null, - List("""Variable 'UnknownType!' expected value of type '$input' which cannot be used as an input type.""" → List(Pos(2, 19))), + List("""Variable 'UnknownType!' 
expected value of type '$input' which cannot be used as an input type.""" -> List(Pos(2, 19))), """{"input": "whoknows"}""".parseJson, validateQuery = false ) @@ -666,7 +666,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithDefaultArgumentValue } """, - Map("data" → Map("fieldWithDefaultArgumentValue" → "\"Hello World\""))) + Map("data" -> Map("fieldWithDefaultArgumentValue" -> "\"Hello World\""))) "when nullable variable provided" in check((), """ @@ -674,7 +674,7 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { fieldWithDefaultArgumentValue(input: $optional) } """, - Map("data" → Map("fieldWithDefaultArgumentValue" → "\"Hello World\"")) , + Map("data" -> Map("fieldWithDefaultArgumentValue" -> "\"Hello World\"")) , validateQuery = false ) @@ -685,12 +685,12 @@ class VariablesSpec extends WordSpec with Matchers with GraphQlSupport { } """, Map( - "data" → Map("fieldWithDefaultArgumentValue" → null), - "errors" → Vector( + "data" -> Map("fieldWithDefaultArgumentValue" -> null), + "errors" -> Vector( Map( - "message" → "Argument 'input' has wrong value: Invalid value.", - "path" → Vector("fieldWithDefaultArgumentValue"), - "locations" → Vector(Map("line" → 3, "column" → 13), Map("line" → 3, "column" → 50))))), + "message" -> "Argument 'input' has wrong value: Invalid value.", + "path" -> Vector("fieldWithDefaultArgumentValue"), + "locations" -> Vector(Map("line" -> 3, "column" -> 13), Map("line" -> 3, "column" -> 50))))), validateQuery = false ) } diff --git a/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala b/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala index 13921c65..35015f3f 100644 --- a/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala +++ b/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala @@ -28,25 +28,25 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport val DataArg = Argument("data", 
ListInputType(DataInputType)) - lazy val QueryType: ObjectType[Unit, Unit] = ObjectType("Query", () ⇒ fields[Unit, Unit]( - Field("ids", ListType(IntType), resolve = _ ⇒ List(1, 2)), - Field("ids1", ListType(IntType), resolve = _ ⇒ List(4, 5)), - Field("ids2", ListType(IntType), resolve = _ ⇒ Nil), - Field("name1", StringType, resolve = _ ⇒ "some name 1"), - Field("name2", OptionType(StringType), resolve = _ ⇒ "some name 2"), + lazy val QueryType: ObjectType[Unit, Unit] = ObjectType("Query", () => fields[Unit, Unit]( + Field("ids", ListType(IntType), resolve = _ => List(1, 2)), + Field("ids1", ListType(IntType), resolve = _ => List(4, 5)), + Field("ids2", ListType(IntType), resolve = _ => Nil), + Field("name1", StringType, resolve = _ => "some name 1"), + Field("name2", OptionType(StringType), resolve = _ => "some name 2"), Field("greet", StringType, arguments = NameArg :: Nil, - resolve = c ⇒ s"Hello, ${c arg NameArg}!"), + resolve = c => s"Hello, ${c arg NameArg}!"), Field("greetAll", StringType, arguments = NamesArg :: Nil, - resolve = c ⇒ s"Hello, ${c arg NamesArg mkString " and "}!"), - Field("nested", QueryType, resolve = _ ⇒ ()), + resolve = c => s"Hello, ${c arg NamesArg mkString " and "}!"), + Field("nested", QueryType, resolve = _ => ()), Field("stuff", ListType(DataType), arguments = IdsArg :: Nil, - resolve = _.arg(IdsArg).map(id ⇒ id → s"data #$id")), + resolve = _.arg(IdsArg).map(id => id -> s"data #$id")), Field("single", DataType, arguments = IdArg :: Nil, - resolve = _.withArgs(IdArg)(id ⇒ id → s"data #$id")), + resolve = _.withArgs(IdArg)(id => id -> s"data #$id")), Field("stuff1", StringType, arguments = IdsArg :: Nil, resolve = _.arg(IdsArg).mkString(", ")) @@ -55,7 +55,7 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport lazy val MutationType = ObjectType("Mutation", fields[Unit, Unit]( Field("createData", ListType(DataType), arguments = DataArg :: Nil, - resolve = _.withArgs(DataArg)(_.map(d ⇒ 
d("id").asInstanceOf[Int] → d("name").asInstanceOf[String]))))) + resolve = _.withArgs(DataArg)(_.map(d => d("id").asInstanceOf[Int] -> d("name").asInstanceOf[String]))))) val schema = Schema(QueryType, Some(MutationType), directives = BuiltinDirectives :+ BatchExecutor.ExportDirective) @@ -100,9 +100,9 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport """ val vars = ScalaInput.scalaInput(Map( - "ids" → Vector(111, 222, 444), - "bar" → Map("a" → "hello", "b" → "world"), - "name" → "Bob")) + "ids" -> Vector(111, 222, 444), + "bar" -> Map("a" -> "hello", "b" -> "world"), + "name" -> "Bob")) val res = BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2", "q3"), @@ -321,8 +321,8 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport checkContainsViolations( BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2")).toListL.runAsync.await, - "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'Int!'." → List(Pos(7, 24), Pos(8, 24)), - "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'String!'." → List(Pos(7, 24), Pos(10, 25))) + "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'Int!'." -> List(Pos(7, 24), Pos(8, 24)), + "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'String!'." -> List(Pos(7, 24), Pos(10, 25))) } "not allow circular dependencies" in { @@ -354,9 +354,9 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport checkContainsViolations( BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2", "q3")).toListL.runAsync.await, - "Operation 'q1' has a circular dependency at path 'q1($from3) -> q3($from2) -> q2($from1) -> q1'." 
→ List(Pos(2, 11)), - "Operation 'q3' has a circular dependency at path 'q3($from2) -> q2($from1) -> q1($from3) -> q3'." → List(Pos(20, 11)), - "Operation 'q2' has a circular dependency at path 'q2($from1) -> q1($from3) -> q3($from2) -> q2'." → List(Pos(14, 11))) + "Operation 'q1' has a circular dependency at path 'q1($from3) -> q3($from2) -> q2($from1) -> q1'." -> List(Pos(2, 11)), + "Operation 'q3' has a circular dependency at path 'q3($from2) -> q2($from1) -> q1($from3) -> q3'." -> List(Pos(20, 11)), + "Operation 'q2' has a circular dependency at path 'q2($from1) -> q1($from3) -> q3($from2) -> q2'." -> List(Pos(14, 11))) } } diff --git a/src/test/scala/sangria/execution/deferred/DeferredResolverSpec.scala b/src/test/scala/sangria/execution/deferred/DeferredResolverSpec.scala index 6cf454ce..c81f3623 100644 --- a/src/test/scala/sangria/execution/deferred/DeferredResolverSpec.scala +++ b/src/test/scala/sangria/execution/deferred/DeferredResolverSpec.scala @@ -17,46 +17,46 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo def deferredResolver(implicit ec: ExecutionContext) = { case class LoadCategories(ids: Seq[String]) extends Deferred[Seq[String]] - lazy val CategoryType: ObjectType[Unit, String] = ObjectType("Category", () ⇒ fields[Unit, String]( - Field("name", StringType, resolve = c ⇒ s"Cat ${c.value}"), - Field("descr", StringType, resolve = c ⇒ s"Cat ${c.value} descr"), - Field("self", CategoryType, resolve = c ⇒ c.value), - Field("selfFut", CategoryType, resolve = c ⇒ Future(c.value)), + lazy val CategoryType: ObjectType[Unit, String] = ObjectType("Category", () => fields[Unit, String]( + Field("name", StringType, resolve = c => s"Cat ${c.value}"), + Field("descr", StringType, resolve = c => s"Cat ${c.value} descr"), + Field("self", CategoryType, resolve = c => c.value), + Field("selfFut", CategoryType, resolve = c => Future(c.value)), Field("selfFutComplex", CategoryType, - complexity = Some((_, _, _) ⇒ 1000), - resolve = 
c ⇒ Future(c.value)), + complexity = Some((_, _, _) => 1000), + resolve = c => Future(c.value)), Field("children", ListType(CategoryType), arguments = Argument("count", IntType) :: Nil, - resolve = c ⇒ LoadCategories((1 to c.arg[Int]("count")).map(i ⇒ s"${c.value}.$i"))), + resolve = c => LoadCategories((1 to c.arg[Int]("count")).map(i => s"${c.value}.$i"))), Field("childrenComplex", ListType(CategoryType), - complexity = Some((_, _, _) ⇒ 1000), + complexity = Some((_, _, _) => 1000), arguments = Argument("count", IntType) :: Nil, - resolve = c ⇒ LoadCategories((1 to c.arg[Int]("count")).map(i ⇒ s"${c.value}.$i"))), + resolve = c => LoadCategories((1 to c.arg[Int]("count")).map(i => s"${c.value}.$i"))), Field("childrenFut", ListType(CategoryType), arguments = Argument("count", IntType) :: Nil, - resolve = c ⇒ DeferredFutureValue(Future.successful( - LoadCategories((1 to c.arg[Int]("count")).map(i ⇒ s"${c.value}.$i"))))) + resolve = c => DeferredFutureValue(Future.successful( + LoadCategories((1 to c.arg[Int]("count")).map(i => s"${c.value}.$i"))))) )) val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("root", CategoryType, resolve = _ ⇒ DeferredValue(LoadCategories(Seq("root"))).map(_.head)), - Field("rootFut", CategoryType, resolve = _ ⇒ DeferredFutureValue(Future.successful(LoadCategories(Seq("root")))).map(_.head)), - Field("fail1", OptionType(CategoryType), resolve = _ ⇒ DeferredValue(LoadCategories(Seq("fail"))).map(_.head)), - Field("fail2", OptionType(CategoryType), resolve = _ ⇒ DeferredValue(LoadCategories(Seq("fail"))).map(_.head)) + Field("root", CategoryType, resolve = _ => DeferredValue(LoadCategories(Seq("root"))).map(_.head)), + Field("rootFut", CategoryType, resolve = _ => DeferredFutureValue(Future.successful(LoadCategories(Seq("root")))).map(_.head)), + Field("fail1", OptionType(CategoryType), resolve = _ => DeferredValue(LoadCategories(Seq("fail"))).map(_.head)), + Field("fail2", OptionType(CategoryType), resolve = _ => 
DeferredValue(LoadCategories(Seq("fail"))).map(_.head)) )) val MutationType = ObjectType("Mutation", fields[Unit, Unit]( - Field("root", OptionType(CategoryType), resolve = _ ⇒ DeferredValue(LoadCategories(Seq("root"))).map(_.head)), - Field("fail1", OptionType(CategoryType), resolve = _ ⇒ DeferredValue(LoadCategories(Seq("fail"))).map(_.head)), - Field("fail2", OptionType(CategoryType), resolve = _ ⇒ DeferredValue(LoadCategories(Seq("fail"))).map(_.head)) + Field("root", OptionType(CategoryType), resolve = _ => DeferredValue(LoadCategories(Seq("root"))).map(_.head)), + Field("fail1", OptionType(CategoryType), resolve = _ => DeferredValue(LoadCategories(Seq("fail"))).map(_.head)), + Field("fail2", OptionType(CategoryType), resolve = _ => DeferredValue(LoadCategories(Seq("fail"))).map(_.head)) )) class MyDeferredResolver extends DeferredResolver[Any] { val callsCount = new AtomicInteger(0) val valueCount = new AtomicInteger(0) - override val includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) ⇒ Boolean] = - Some((_, _, _, complexity) ⇒ complexity < 100) + override val includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) => Boolean] = + Some((_, _, _, complexity) => complexity < 100) override def groupDeferred[T <: DeferredWithInfo](deferred: Vector[T]) = { val (expensive, cheap) = deferred.partition(_.complexity > 100) @@ -68,8 +68,8 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo valueCount.addAndGet(deferred.size) deferred.map { - case LoadCategories(ids) if ids contains "fail" ⇒ Future.failed(new IllegalStateException("foo")) - case LoadCategories(ids) ⇒ Future.successful(ids) + case LoadCategories(ids) if ids contains "fail" => Future.failed(new IllegalStateException("foo")) + case LoadCategories(ids) => Future.successful(ids) } } } @@ -81,7 +81,7 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo val result = Executor.execute(schema, 
query, deferredResolver = resolver).await - resolver → result + resolver -> result } "result in a single resolution of once level" in { @@ -232,12 +232,12 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo } """, Map( - "fail1" → null, - "root" → Map("name" → "Cat root"), - "fail2" → null), + "fail1" -> null, + "root" -> Map("name" -> "Cat root"), + "fail2" -> null), List( - "foo" → List(Pos(3, 11)), - "foo" → List(Pos(5, 11))), + "foo" -> List(Pos(3, 11)), + "foo" -> List(Pos(5, 11))), resolver = new MyDeferredResolver) "failed mutations should be handled appropriately" in checkContainsErrors(schema, (), @@ -249,12 +249,12 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo } """, Map( - "fail1" → null, - "root" → Map("name" → "Cat root"), - "fail2" → null), + "fail1" -> null, + "root" -> Map("name" -> "Cat root"), + "fail2" -> null), List( - "foo" → List(Pos(3, 11)), - "foo" → List(Pos(5, 11))), + "foo" -> List(Pos(3, 11)), + "foo" -> List(Pos(5, 11))), resolver = new MyDeferredResolver) } @@ -267,4 +267,4 @@ class DeferredResolverSpec extends WordSpec with Matchers with FutureResultSuppo behave like deferredResolver (sync.executionContext) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/execution/deferred/FetcherSpec.scala b/src/test/scala/sangria/execution/deferred/FetcherSpec.scala index 2abd9b59..baa12c38 100644 --- a/src/test/scala/sangria/execution/deferred/FetcherSpec.scala +++ b/src/test/scala/sangria/execution/deferred/FetcherSpec.scala @@ -50,16 +50,16 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { Product(6, "Golden ring", Vector("6"))) def loadCategories(ids: Seq[String])(implicit ec: ExecutionContext): Future[Seq[Category]] = - Future(ids.flatMap(id ⇒ categories.find(_.id == id))) + Future(ids.flatMap(id => categories.find(_.id == id))) def loadProducts(ids: Seq[Int])(implicit ec: ExecutionContext): Future[Seq[Product]] = - 
Future(ids.flatMap(id ⇒ products.find(_.id == id))) + Future(ids.flatMap(id => products.find(_.id == id))) def loadProductsByCategory(categoryIds: Seq[String])(implicit ec: ExecutionContext): Future[Seq[Product]] = - Future(products.filter(p ⇒ categoryIds.exists(p.inCategories contains _))) + Future(products.filter(p => categoryIds.exists(p.inCategories contains _))) def loadCategoriesByProduct(productIds: Seq[Int])(implicit ec: ExecutionContext): Future[Seq[Category]] = - Future(categories.filter(c ⇒ productIds.exists(c.products contains _))) + Future(categories.filter(c => productIds.exists(c.products contains _))) def getCategory(id: String)(implicit ec: ExecutionContext) = Future(categories.find(_.id == id)) @@ -70,81 +70,81 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { def properFetcher(implicit ec: ExecutionContext) = { val defaultCatFetcher = Fetcher.relCaching[Repo, Category, Category, String]( - (repo, ids) ⇒ repo.loadCategories(ids), - (repo, ids) ⇒ repo.loadCategoriesByProduct(ids(catProd))) + (repo, ids) => repo.loadCategories(ids), + (repo, ids) => repo.loadCategoriesByProduct(ids(catProd))) val defaultProdFetcher = Fetcher.relCaching[Repo, Product, Product, Int]( - (repo, ids) ⇒ repo.loadProducts(ids), - (repo, ids) ⇒ repo.loadProductsByCategory(ids(prodCat))) + (repo, ids) => repo.loadProducts(ids), + (repo, ids) => repo.loadProductsByCategory(ids(prodCat))) val complexProdFetcher = Fetcher.relCaching[Repo, Product, (Seq[String], Product), Int]( - (repo, ids) ⇒ repo.loadProducts(ids), - (repo, ids) ⇒ repo.loadProductsByCategory(ids(prodComplexCat)).map(_.map(p ⇒ p.inCategories → p))) + (repo, ids) => repo.loadProducts(ids), + (repo, ids) => repo.loadProductsByCategory(ids(prodComplexCat)).map(_.map(p => p.inCategories -> p))) val defaultResolver = DeferredResolver.fetchers(defaultProdFetcher, defaultCatFetcher) def schema(fetcherCat: Fetcher[Repo, Category, Category, String] = defaultCatFetcher, fetcherProd: 
Fetcher[Repo, Product, Product, Int] = defaultProdFetcher) = { - lazy val ProductType: ObjectType[Repo, Product] = ObjectType("Product", () ⇒ fields( - Field("id", IntType, resolve = c ⇒ c.value.id), - Field("name", StringType, resolve = c ⇒ c.value.name), + lazy val ProductType: ObjectType[Repo, Product] = ObjectType("Product", () => fields( + Field("id", IntType, resolve = c => c.value.id), + Field("name", StringType, resolve = c => c.value.name), Field("categories", ListType(CategoryType), - resolve = c ⇒ fetcherCat.deferSeqOpt(c.value.inCategories)), + resolve = c => fetcherCat.deferSeqOpt(c.value.inCategories)), Field("categoryRel", CategoryType, - resolve = c ⇒ fetcherCat.deferRel(catProd, c.value.id)), + resolve = c => fetcherCat.deferRel(catProd, c.value.id)), Field("categoryRelOpt", OptionType(CategoryType), - resolve = c ⇒ fetcherCat.deferRelOpt(catProd, c.value.id)), + resolve = c => fetcherCat.deferRelOpt(catProd, c.value.id)), Field("categoryRelSeq", ListType(CategoryType), - resolve = c ⇒ fetcherCat.deferRelSeq(catProd, c.value.id)))) - - lazy val CategoryType: ObjectType[Repo, Category] = ObjectType("Category", () ⇒ fields( - Field("id", StringType, resolve = c ⇒ c.value.id), - Field("name", StringType, resolve = c ⇒ c.value.name), - Field("color", StringType, resolve = c ⇒ ColorDeferred("red")), - Field("self", CategoryType, resolve = c ⇒ c.value), - Field("selfOpt", OptionType(CategoryType), resolve = c ⇒ Some(c.value)), - Field("selfFut", CategoryType, resolve = c ⇒ Future(c.value)), + resolve = c => fetcherCat.deferRelSeq(catProd, c.value.id)))) + + lazy val CategoryType: ObjectType[Repo, Category] = ObjectType("Category", () => fields( + Field("id", StringType, resolve = c => c.value.id), + Field("name", StringType, resolve = c => c.value.name), + Field("color", StringType, resolve = c => ColorDeferred("red")), + Field("self", CategoryType, resolve = c => c.value), + Field("selfOpt", OptionType(CategoryType), resolve = c => Some(c.value)), + 
Field("selfFut", CategoryType, resolve = c => Future(c.value)), Field("products", ListType(ProductType), - resolve = c ⇒ fetcherProd.deferSeqOpt(c.value.products)), + resolve = c => fetcherProd.deferSeqOpt(c.value.products)), Field("productRel", ProductType, - resolve = c ⇒ fetcherProd.deferRel(prodCat, c.value.id)), + resolve = c => fetcherProd.deferRel(prodCat, c.value.id)), Field("productComplexRel", ListType(ProductType), - resolve = c ⇒ complexProdFetcher.deferRelSeq(prodComplexCat, c.value.id)), + resolve = c => complexProdFetcher.deferRelSeq(prodComplexCat, c.value.id)), Field("productRelOpt", OptionType(ProductType), - resolve = c ⇒ fetcherProd.deferRelOpt(prodCat, c.value.id)), + resolve = c => fetcherProd.deferRelOpt(prodCat, c.value.id)), Field("productRelSeq", ListType(ProductType), - resolve = c ⇒ fetcherProd.deferRelSeq(prodCat, c.value.id)), + resolve = c => fetcherProd.deferRelSeq(prodCat, c.value.id)), Field("categoryNonOpt", CategoryType, arguments = Argument("id", StringType) :: Nil, - resolve = c ⇒ fetcherCat.defer(c.arg[String]("id"))), + resolve = c => fetcherCat.defer(c.arg[String]("id"))), Field("childrenSeq", ListType(CategoryType), - resolve = c ⇒ fetcherCat.deferSeq(c.value.children)), + resolve = c => fetcherCat.deferSeq(c.value.children)), Field("childrenSeqOpt", ListType(CategoryType), - resolve = c ⇒ fetcherCat.deferSeqOpt(c.value.children)), + resolve = c => fetcherCat.deferSeqOpt(c.value.children)), Field("childrenFut", ListType(CategoryType), - resolve = c ⇒ Future.successful( + resolve = c => Future.successful( fetcherCat.deferSeq(c.value.children))))) val QueryType = ObjectType("Query", fields[Repo, Unit]( Field("category", OptionType(CategoryType), arguments = Argument("id", StringType) :: Nil, - resolve = c ⇒ fetcherCat.deferOpt(c.arg[String]("id"))), + resolve = c => fetcherCat.deferOpt(c.arg[String]("id"))), Field("categoryEager", OptionType(CategoryType), arguments = Argument("id", StringType) :: Nil, - resolve = c ⇒ 
c.ctx.getCategory(c.arg[String]("id"))), + resolve = c => c.ctx.getCategory(c.arg[String]("id"))), Field("categoryNonOpt", CategoryType, arguments = Argument("id", StringType) :: Nil, - resolve = c ⇒ fetcherCat.defer(c.arg[String]("id"))), + resolve = c => fetcherCat.defer(c.arg[String]("id"))), Field("products", ListType(ProductType), arguments = Argument("categoryIds", ListInputType(StringType)) :: Nil, - resolve = c ⇒ fetcherProd.deferRelSeqMany(prodCat, c.arg[Seq[String]]("categoryIds"))), + resolve = c => fetcherProd.deferRelSeqMany(prodCat, c.arg[Seq[String]]("categoryIds"))), Field("productOpt", OptionType(ProductType), arguments = Argument("id", OptionInputType(IntType)) :: Nil, - resolve = c ⇒ fetcherProd.deferOpt(c.argOpt[Int]("id"))), + resolve = c => fetcherProd.deferOpt(c.argOpt[Int]("id"))), Field("productsOptExplicit", ListType(OptionType(ProductType)), arguments = Argument("ids", ListInputType(IntType)) :: Nil, - resolve = c ⇒ fetcherProd.deferSeqOptExplicit(c.arg[Seq[Int]]("ids"))), - Field("root", CategoryType, resolve = _ ⇒ fetcherCat.defer("1")), - Field("rootFut", CategoryType, resolve = _ ⇒ + resolve = c => fetcherProd.deferSeqOptExplicit(c.arg[Seq[Int]]("ids"))), + Field("root", CategoryType, resolve = _ => fetcherCat.defer("1")), + Field("rootFut", CategoryType, resolve = _ => Future.successful(fetcherCat.defer("1"))))) Schema(QueryType) @@ -183,7 +183,7 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedIds = Vector.empty[Seq[String]] val fetcher = - Fetcher((repo: Repo, ids: Seq[String]) ⇒ { + Fetcher((repo: Repo, ids: Seq[String]) => { fetchedIds = fetchedIds :+ ids repo.loadCategories(ids) @@ -192,7 +192,7 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedIdsCached = Vector.empty[Seq[String]] val fetcherCached = - Fetcher.caching((repo: Repo, ids: Seq[String]) ⇒ { + Fetcher.caching((repo: Repo, ids: Seq[String]) => { fetchedIdsCached = fetchedIdsCached :+ ids 
repo.loadCategories(ids) @@ -217,51 +217,51 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { List(res, resCached) foreach (_ should be ( Map( - "data" → Map( - "c1" → null, - "c3" → Map( - "name" → "Cat 8", - "childrenSeqOpt" → Vector( + "data" -> Map( + "c1" -> null, + "c3" -> Map( + "name" -> "Cat 8", + "childrenSeqOpt" -> Vector( Map( - "id" → "4"), + "id" -> "4"), Map( - "id" → "5"))), - "rootFut" → Map( - "id" → "1", - "name" → "Root", - "childrenSeq" → Vector( + "id" -> "5"))), + "rootFut" -> Map( + "id" -> "1", + "name" -> "Root", + "childrenSeq" -> Vector( Map( - "id" → "2", - "name" → "Cat 2", - "childrenSeq" → Vector( + "id" -> "2", + "name" -> "Cat 2", + "childrenSeq" -> Vector( Map( - "id" → "5", - "name" → "Cat 5", - "childrenSeq" → Vector.empty), + "id" -> "5", + "name" -> "Cat 5", + "childrenSeq" -> Vector.empty), Map( - "id" → "6", - "name" → "Cat 6", - "childrenSeq" → Vector.empty))), + "id" -> "6", + "name" -> "Cat 6", + "childrenSeq" -> Vector.empty))), Map( - "id" → "3", - "name" → "Cat 3", - "childrenSeq" → Vector( + "id" -> "3", + "name" -> "Cat 3", + "childrenSeq" -> Vector( Map( - "id" → "7", - "name" → "Cat 7", - "childrenSeq" → Vector.empty), + "id" -> "7", + "name" -> "Cat 7", + "childrenSeq" -> Vector.empty), Map( - "id" → "5", - "name" → "Cat 5", - "childrenSeq" → Vector.empty), + "id" -> "5", + "name" -> "Cat 5", + "childrenSeq" -> Vector.empty), Map( - "id" → "6", - "name" → "Cat 6", - "childrenSeq" → Vector.empty))), + "id" -> "6", + "name" -> "Cat 6", + "childrenSeq" -> Vector.empty))), Map( - "id" → "4", - "name" → "Cat 4", - "childrenSeq" → Vector.empty))))))) + "id" -> "4", + "name" -> "Cat 4", + "childrenSeq" -> Vector.empty))))))) } "fetch results with `deferOpt` and option argument" in { @@ -278,12 +278,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { res should be ( Map( - "data" → Map( - "p1" → Map( - "id" → 1, - "name" → "Rusty sword"), - "p2" → null, - "p3" → 
null))) + "data" -> Map( + "p1" -> Map( + "id" -> 1, + "name" -> "Rusty sword"), + "p2" -> null, + "p3" -> null))) } "fetch results with `deferSeqOptExplicit`" in { @@ -296,12 +296,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { Executor.execute(schema(), query, new Repo, deferredResolver = defaultResolver).await should be ( Map( - "data" → Map( - "productsOptExplicit" → Vector( - Map("id" → 1, "name" → "Rusty sword"), + "data" -> Map( + "productsOptExplicit" -> Vector( + Map("id" -> 1, "name" -> "Rusty sword"), null, - Map("id" → 2, "name" → "Magic belt"), - Map("id" → 3, "name" → "Health potion"), + Map("id" -> 2, "name" -> "Magic belt"), + Map("id" -> 3, "name" -> "Health potion"), null)))) } @@ -332,12 +332,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { val fetcher = Fetcher.rel( - (repo: Repo, ids: Seq[String]) ⇒ { + (repo: Repo, ids: Seq[String]) => { fetchedIds = fetchedIds :+ ids repo.loadCategories(ids) }, - (repo: Repo, ids: RelationIds[Category]) ⇒ { + (repo: Repo, ids: RelationIds[Category]) => { fetchedRels = fetchedRels :+ ids repo.loadCategoriesByProduct(ids(catProd)) @@ -348,12 +348,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { val fetcherCached = Fetcher.relCaching( - (repo: Repo, ids: Seq[String]) ⇒ { + (repo: Repo, ids: Seq[String]) => { fetchedIdsCached = fetchedIdsCached :+ ids repo.loadCategories(ids) }, - (repo: Repo, ids: RelationIds[Category]) ⇒ { + (repo: Repo, ids: RelationIds[Category]) => { fetchedRelsCached = fetchedRelsCached :+ ids repo.loadCategoriesByProduct(ids(catProd)) @@ -363,7 +363,7 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { val fetcherRelsOnly = Fetcher.relOnly( - (repo: Repo, ids: RelationIds[Category]) ⇒ { + (repo: Repo, ids: RelationIds[Category]) => { fetchedRelsOnly = fetchedRelsOnly :+ ids repo.loadCategoriesByProduct(ids(catProd)) @@ -373,7 +373,7 @@ class FetcherSpec extends 
WordSpec with Matchers with FutureResultSupport { val fetcherRelsOnlyCached = Fetcher.relOnlyCaching( - (repo: Repo, ids: RelationIds[Category]) ⇒ { + (repo: Repo, ids: RelationIds[Category]) => { fetchedRelsOnlyCached = fetchedRelsOnlyCached :+ ids repo.loadCategoriesByProduct(ids(catProd)) @@ -395,11 +395,11 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { fetchedIdsCached should have size 0 val relsOut = Vector( - RelationIds[Category](Map(catProd → Vector(1, 2, 3))), - RelationIds[Category](Map(catProd → Vector(1, 2, 3))), - RelationIds[Category](Map(catProd → Vector(1, 2, 3)))) + RelationIds[Category](Map(catProd -> Vector(1, 2, 3))), + RelationIds[Category](Map(catProd -> Vector(1, 2, 3))), + RelationIds[Category](Map(catProd -> Vector(1, 2, 3)))) - val relsCachedOut = Vector(RelationIds[Category](Map(catProd → Vector(1, 2, 3)))) + val relsCachedOut = Vector(RelationIds[Category](Map(catProd -> Vector(1, 2, 3)))) fetchedRels should be (relsOut) @@ -436,28 +436,28 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { deferredResolver = DeferredResolver.fetchers(complexProdFetcher, defaultProdFetcher, defaultCatFetcher)).await res should be (Map( - "data" → Map( - "c1" → Map( - "productComplexRel" → Vector( + "data" -> Map( + "c1" -> Map( + "productComplexRel" -> Vector( Map( - "id" → 2), + "id" -> 2), Map( - "id" → 4))), - "c2" → Map( - "productComplexRel" → Vector( + "id" -> 4))), + "c2" -> Map( + "productComplexRel" -> Vector( Map( - "name" → "Rusty sword"), + "name" -> "Rusty sword"), Map( - "name" → "Common boots"), + "name" -> "Common boots"), Map( - "name" → "Golden ring")))))) + "name" -> "Golden ring")))))) } "should result in error for missing non-optional values" in { var fetchedIds = Vector.empty[Seq[String]] val fetcher = - Fetcher((repo: Repo, ids: Seq[String]) ⇒ { + Fetcher((repo: Repo, ids: Seq[String]) => { fetchedIds = fetchedIds :+ ids repo.loadCategories(ids) @@ -476,13 +476,13 @@ class 
FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "c1" → null, - "c2" → Map( - "name" → "Root", - "selfOpt" → null)), + "c1" -> null, + "c2" -> Map( + "name" -> "Root", + "selfOpt" -> null)), List( - "Fetcher has not resolved non-optional ID 'foo!'." → List(Pos(3, 41)), - "Fetcher has not resolved non-optional ID 'qwe'." → List(Pos(7, 17))), + "Fetcher has not resolved non-optional ID 'foo!'." -> List(Pos(3, 41)), + "Fetcher has not resolved non-optional ID 'qwe'." -> List(Pos(7, 17))), resolver = DeferredResolver.fetchers(fetcher), userContext = new Repo) @@ -496,15 +496,15 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { val callsCount = new AtomicInteger(0) val valueCount = new AtomicInteger(0) - override val includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) ⇒ Boolean] = - Some((_, _, _, _) ⇒ false) + override val includeDeferredFromField: Option[(Field[_, _], Vector[ast.Field], Args, Double) => Boolean] = + Some((_, _, _, _) => false) def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = { callsCount.getAndIncrement() valueCount.addAndGet(deferred.size) deferred.map { - case ColorDeferred(id) ⇒ Future.successful(id + "Color") + case ColorDeferred(id) => Future.successful(id + "Color") } } } @@ -522,18 +522,18 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "c1" → Map( - "name" → "Root", - "childrenSeq" → Vector( - Map("id" → "2"), - Map("id" → "3"), - Map("id" → "4"))), - "c2" → Map( - "color" → "redColor", - "childrenSeq" → Vector( - Map("name" → "Cat 5"), - Map("name" → "Cat 6"))))), + "data" -> Map( + "c1" -> Map( + "name" -> "Root", + "childrenSeq" -> Vector( + Map("id" -> "2"), + Map("id" -> "3"), + Map("id" -> "4"))), + "c2" -> Map( + "color" -> "redColor", + "childrenSeq" -> Vector( + Map("name" -> "Cat 5"), + Map("name" -> "Cat 6"))))), resolver = 
DeferredResolver.fetchersWithFallback(new MyDeferredResolver, defaultCatFetcher, defaultProdFetcher), userContext = new Repo) } @@ -542,10 +542,10 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedIds = Vector.empty[Seq[String]] val cache = FetcherCache.simple - (1 to 3) foreach { _ ⇒ + (1 to 3) foreach { _ => val fetcher = Fetcher.caching( config = FetcherConfig.caching(cache), - fetch = (repo: Repo, ids: Seq[String]) ⇒ { + fetch = (repo: Repo, ids: Seq[String]) => { fetchedIds = fetchedIds :+ ids repo.loadCategories(ids) @@ -568,25 +568,25 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "root" → Map( - "childrenSeq" → Vector( + "data" -> Map( + "root" -> Map( + "childrenSeq" -> Vector( Map( - "childrenSeq" → Vector( + "childrenSeq" -> Vector( Map( - "childrenSeq" → Vector.empty), + "childrenSeq" -> Vector.empty), Map( - "childrenSeq" → Vector.empty))), + "childrenSeq" -> Vector.empty))), Map( - "childrenSeq" → Vector( + "childrenSeq" -> Vector( Map( - "childrenSeq" → Vector.empty), + "childrenSeq" -> Vector.empty), Map( - "childrenSeq" → Vector.empty), + "childrenSeq" -> Vector.empty), Map( - "childrenSeq" → Vector.empty))), + "childrenSeq" -> Vector.empty))), Map( - "childrenSeq" → Vector.empty))))), + "childrenSeq" -> Vector.empty))))), resolver = DeferredResolver.fetchers(fetcher), userContext = new Repo) } @@ -601,7 +601,7 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedCatIds = Vector.empty[Seq[String]] val fetcherCat = - Fetcher((repo: Repo, ids: Seq[String]) ⇒ { + Fetcher((repo: Repo, ids: Seq[String]) => { fetchedCatIds = fetchedCatIds :+ ids repo.loadCategories(ids) @@ -610,7 +610,7 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedProdIds = Vector.empty[Seq[Int]] val fetcherProd = - Fetcher((repo: Repo, ids: Seq[Int]) ⇒ { + Fetcher((repo: Repo, ids: Seq[Int]) => { 
fetchedProdIds = fetchedProdIds :+ ids repo.loadProducts(ids) @@ -641,111 +641,111 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "root" → Map( - "name" → "Root", - "products" → Vector.empty, - "childrenSeq" → Vector( + "data" -> Map( + "root" -> Map( + "name" -> "Root", + "products" -> Vector.empty, + "childrenSeq" -> Vector( Map( - "name" → "Cat 2", - "products" → Vector.empty, - "childrenSeq" → Vector( + "name" -> "Cat 2", + "products" -> Vector.empty, + "childrenSeq" -> Vector( Map( - "name" → "Cat 5", - "products" → Vector( + "name" -> "Cat 5", + "products" -> Vector( Map( - "name" → "Magic belt", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 5"), - Map("name" → "Cat 7"))), + "name" -> "Magic belt", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 5"), + Map("name" -> "Cat 7"))), Map( - "name" → "Unidentified potion", - "categories" → Vector( - Map("name" → "Cat 5"))))), + "name" -> "Unidentified potion", + "categories" -> Vector( + Map("name" -> "Cat 5"))))), Map( - "name" → "Cat 6", - "products" → Vector( + "name" -> "Cat 6", + "products" -> Vector( Map( - "name" → "Common boots", - "categories" → Vector( - Map("name" → "Cat 6"))), + "name" -> "Common boots", + "categories" -> Vector( + Map("name" -> "Cat 6"))), Map( - "name" → "Golden ring", - "categories" → Vector( - Map("name" → "Cat 6"))), + "name" -> "Golden ring", + "categories" -> Vector( + Map("name" -> "Cat 6"))), Map( - "name" → "Rusty sword", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 6"))))))), + "name" -> "Rusty sword", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 6"))))))), Map( - "name" → "Cat 3", - "products" → Vector.empty, - "childrenSeq" → Vector( + "name" -> "Cat 3", + "products" -> Vector.empty, + "childrenSeq" -> Vector( Map( - "name" → "Cat 7", - "products" → Vector( + "name" -> "Cat 7", + "products" -> Vector( Map( - 
"name" → "Magic belt", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 5"), - Map("name" → "Cat 7"))), + "name" -> "Magic belt", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 5"), + Map("name" -> "Cat 7"))), Map( - "name" → "Health potion", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 7"))))), + "name" -> "Health potion", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 7"))))), Map( - "name" → "Cat 5", - "products" → Vector( + "name" -> "Cat 5", + "products" -> Vector( Map( - "name" → "Magic belt", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 5"), - Map("name" → "Cat 7"))), + "name" -> "Magic belt", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 5"), + Map("name" -> "Cat 7"))), Map( - "name" → "Unidentified potion", - "categories" → Vector( - Map("name" → "Cat 5"))))), + "name" -> "Unidentified potion", + "categories" -> Vector( + Map("name" -> "Cat 5"))))), Map( - "name" → "Cat 6", - "products" → Vector( + "name" -> "Cat 6", + "products" -> Vector( Map( - "name" → "Common boots", - "categories" → Vector( - Map("name" → "Cat 6"))), + "name" -> "Common boots", + "categories" -> Vector( + Map("name" -> "Cat 6"))), Map( - "name" → "Golden ring", - "categories" → Vector( - Map("name" → "Cat 6"))), + "name" -> "Golden ring", + "categories" -> Vector( + Map("name" -> "Cat 6"))), Map( - "name" → "Rusty sword", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 6"))))))), + "name" -> "Rusty sword", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 6"))))))), Map( - "name" → "Cat 4", - "products" → Vector( + "name" -> "Cat 4", + "products" -> Vector( Map( - "name" → "Rusty sword", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 6"))), + "name" -> "Rusty sword", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 6"))), Map( - "name" → "Magic 
belt", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 5"), - Map("name" → "Cat 7"))), + "name" -> "Magic belt", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 5"), + Map("name" -> "Cat 7"))), Map( - "name" → "Health potion", - "categories" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 7")))), - "childrenSeq" → Vector.empty))))), + "name" -> "Health potion", + "categories" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 7")))), + "childrenSeq" -> Vector.empty))))), resolver = DeferredResolver.fetchers(fetcherCat, fetcherProd), userContext = new Repo) @@ -776,12 +776,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "category" → Map( - "productRel" → Map( - "name" → "Rusty sword", - "categoryRel" → Map( - "name" → "Cat 4"))))), + "data" -> Map( + "category" -> Map( + "productRel" -> Map( + "name" -> "Rusty sword", + "categoryRel" -> Map( + "name" -> "Cat 4"))))), resolver = defaultResolver, userContext = new Repo) @@ -795,8 +795,8 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } } """, - Map("category" → null), - List("Fetcher has not resolved non-optional relation ID '1' for relation 'SimpleRelation(product-category)'." → List(Pos(4, 13))), + Map("category" -> null), + List("Fetcher has not resolved non-optional relation ID '1' for relation 'SimpleRelation(product-category)'." 
-> List(Pos(4, 13))), resolver = defaultResolver, userContext = new Repo) @@ -833,32 +833,32 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "c1" → Map( - "productRelOpt" → null, - "productRelSeq" → Vector.empty), - "c2" → Map( - "productRelOpt" → Map( - "name" → "Rusty sword", - "categoryRelOpt" → Map( - "name" → "Cat 4")), - "productRelSeq" → Vector( + "data" -> Map( + "c1" -> Map( + "productRelOpt" -> null, + "productRelSeq" -> Vector.empty), + "c2" -> Map( + "productRelOpt" -> Map( + "name" -> "Rusty sword", + "categoryRelOpt" -> Map( + "name" -> "Cat 4")), + "productRelSeq" -> Vector( Map( - "name" → "Rusty sword", - "categoryRelSeq" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 6"))), + "name" -> "Rusty sword", + "categoryRelSeq" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 6"))), Map( - "name" → "Magic belt", - "categoryRelSeq" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 5"), - Map("name" → "Cat 7"))), + "name" -> "Magic belt", + "categoryRelSeq" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 5"), + Map("name" -> "Cat 7"))), Map( - "name" → "Health potion", - "categoryRelSeq" → Vector( - Map("name" → "Cat 4"), - Map("name" → "Cat 7"))))))), + "name" -> "Health potion", + "categoryRelSeq" -> Vector( + Map("name" -> "Cat 4"), + Map("name" -> "Cat 7"))))))), resolver = defaultResolver, userContext = new Repo) @@ -871,26 +871,26 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "products" → Vector( + "data" -> Map( + "products" -> Vector( Map( - "id" → 2, - "name" → "Magic belt"), + "id" -> 2, + "name" -> "Magic belt"), Map( - "id" → 4, - "name" → "Unidentified potion"), + "id" -> 4, + "name" -> "Unidentified potion"), Map( - "id" → 1, - "name" → "Rusty sword"), + "id" -> 1, + "name" -> "Rusty sword"), Map( - "id" → 5, - "name" → "Common boots"), + "id" -> 5, + "name" -> "Common boots"), Map( - "id" → 
6, - "name" → "Golden ring"), + "id" -> 6, + "name" -> "Golden ring"), Map( - "id" → 3, - "name" → "Health potion")))), + "id" -> 3, + "name" -> "Health potion")))), resolver = defaultResolver, userContext = new Repo) @@ -898,16 +898,16 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { var fetchedProdIds = Vector.empty[Seq[Int]] val fetcherProd = - Fetcher.cachingWithContext[Repo, Product, Int] { (c, ids) ⇒ + Fetcher.cachingWithContext[Repo, Product, Int] { (c, ids) => fetchedProdIds = fetchedProdIds :+ ids c.ctx.loadProducts(ids) } val fetcherCat = - Fetcher.cachingWithContext[Repo, Category, String] { (c, ids) ⇒ - c.ctx.loadCategories(ids).map { categories ⇒ - c.cacheFor(fetcherProd).foreach { productCache ⇒ + Fetcher.cachingWithContext[Repo, Category, String] { (c, ids) => + c.ctx.loadCategories(ids).map { categories => + c.cacheFor(fetcherProd).foreach { productCache => productCache.update(4, Product(4, "Manually Cached", categories.map(_.id).toVector)) } @@ -928,12 +928,12 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { } """, Map( - "data" → Map( - "category" → Map( - "name" → "Cat 5", - "products" → Vector( - Map("name" → "Magic belt"), - Map("name" → "Manually Cached"))))), + "data" -> Map( + "category" -> Map( + "name" -> "Cat 5", + "products" -> Vector( + Map("name" -> "Magic belt"), + Map("name" -> "Manually Cached"))))), resolver = DeferredResolver.fetchers(fetcherCat, fetcherProd), userContext = new Repo) @@ -950,4 +950,4 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { behave like properFetcher (sync.executionContext) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/introspection/IntrospectionSpec.scala b/src/test/scala/sangria/introspection/IntrospectionSpec.scala index 705e3958..dcd548e3 100644 --- a/src/test/scala/sangria/introspection/IntrospectionSpec.scala +++ b/src/test/scala/sangria/introspection/IntrospectionSpec.scala @@ -13,778 
+13,778 @@ import scala.util.Success class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport { "Introspection" should { "executes an introspection query" in { - val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1)))) + val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1)))) Executor.execute(schema, introspectionQuery).await should be (Map( - "data" → Map( - "__schema" → Map( - "queryType" → Map( - "name" → "QueryRoot"), - "mutationType" → null, - "subscriptionType" → null, - "types" → Vector( + "data" -> Map( + "__schema" -> Map( + "queryType" -> Map( + "name" -> "QueryRoot"), + "mutationType" -> null, + "subscriptionType" -> null, + "types" -> Vector( Map( - "kind" → "OBJECT", - "name" → "QueryRoot", - "description" → null, - "fields" → Vector( - Map( - "name" → "foo", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Int", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "QueryRoot", + "description" -> null, + "fields" -> Vector( + Map( + "name" -> "foo", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Int", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__Directive", - "description" → "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL\u2019s 
execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor.", - "fields" → Vector( - Map( - "name" → "name", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "locations", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "ENUM", - "name" → "__DirectiveLocation", - "ofType" → null)))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "args", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__InputValue", - "ofType" → null)))), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "__Directive", + "description" -> "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL\u2019s execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", + "fields" -> Vector( + Map( + "name" -> "name", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "locations", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "ENUM", + "name" -> "__DirectiveLocation", + "ofType" -> null)))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "args", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__InputValue", + "ofType" -> null)))), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "ENUM", - "name" → "__DirectiveLocation", - "description" → "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", - "fields" → null, - "inputFields" → null, - "interfaces" → null, - "enumValues" → Vector( - Map( - "name" → "QUERY", - "description" → "Location adjacent to a query operation.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - 
"name" → "MUTATION", - "description" → "Location adjacent to a mutation operation.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "SUBSCRIPTION", - "description" → "Location adjacent to a subscription operation.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "FIELD", - "description" → "Location adjacent to a field.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "FRAGMENT_DEFINITION", - "description" → "Location adjacent to a fragment definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "FRAGMENT_SPREAD", - "description" → "Location adjacent to a fragment spread.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INLINE_FRAGMENT", - "description" → "Location adjacent to an inline fragment.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "VARIABLE_DEFINITION", - "description" → "Location adjacent to a variable definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "SCHEMA", - "description" → "Location adjacent to a schema definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "SCALAR", - "description" → "Location adjacent to a scalar definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "OBJECT", - "description" → "Location adjacent to an object type definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "FIELD_DEFINITION", - "description" → "Location adjacent to a field definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "ARGUMENT_DEFINITION", - "description" → "Location adjacent to an argument definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INTERFACE", - "description" → "Location adjacent to an interface definition.", - "isDeprecated" → false, - "deprecationReason" → null), 
- Map( - "name" → "UNION", - "description" → "Location adjacent to a union definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "ENUM", - "description" → "Location adjacent to an enum definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "ENUM_VALUE", - "description" → "Location adjacent to an enum value definition.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INPUT_OBJECT", - "description" → "INPUT_OBJECT", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INPUT_FIELD_DEFINITION", - "description" → "Location adjacent to an input object field definition.", - "isDeprecated" → false, - "deprecationReason" → null)), - "possibleTypes" → null), + "kind" -> "ENUM", + "name" -> "__DirectiveLocation", + "description" -> "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", + "fields" -> null, + "inputFields" -> null, + "interfaces" -> null, + "enumValues" -> Vector( + Map( + "name" -> "QUERY", + "description" -> "Location adjacent to a query operation.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "MUTATION", + "description" -> "Location adjacent to a mutation operation.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "SUBSCRIPTION", + "description" -> "Location adjacent to a subscription operation.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "FIELD", + "description" -> "Location adjacent to a field.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "FRAGMENT_DEFINITION", + "description" -> "Location adjacent to a fragment definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "FRAGMENT_SPREAD", + "description" -> "Location adjacent to a fragment spread.", + "isDeprecated" -> false, + 
"deprecationReason" -> null), + Map( + "name" -> "INLINE_FRAGMENT", + "description" -> "Location adjacent to an inline fragment.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "VARIABLE_DEFINITION", + "description" -> "Location adjacent to a variable definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "SCHEMA", + "description" -> "Location adjacent to a schema definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "SCALAR", + "description" -> "Location adjacent to a scalar definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "OBJECT", + "description" -> "Location adjacent to an object type definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "FIELD_DEFINITION", + "description" -> "Location adjacent to a field definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "ARGUMENT_DEFINITION", + "description" -> "Location adjacent to an argument definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "INTERFACE", + "description" -> "Location adjacent to an interface definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "UNION", + "description" -> "Location adjacent to a union definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "ENUM", + "description" -> "Location adjacent to an enum definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "ENUM_VALUE", + "description" -> "Location adjacent to an enum value definition.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "INPUT_OBJECT", + "description" -> "INPUT_OBJECT", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "INPUT_FIELD_DEFINITION", + "description" -> "Location adjacent to an input object 
field definition.", + "isDeprecated" -> false, + "deprecationReason" -> null)), + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__EnumValue", - "description" → "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.", - "fields" → Vector( - Map( - "name" → "name", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "isDeprecated", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "deprecationReason", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "__EnumValue", + "description" -> "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. 
However an Enum value is returned in a JSON response as a string.", + "fields" -> Vector( + Map( + "name" -> "name", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "isDeprecated", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "deprecationReason", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__Field", - "description" → "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", - "fields" → Vector( - Map( - "name" → "name", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "args", - "description" → null, - 
"args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__InputValue", - "ofType" → null)))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "type", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "isDeprecated", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "deprecationReason", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "__Field", + "description" -> "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", + "fields" -> Vector( + Map( + "name" -> "name", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "args", + "description" -> 
null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__InputValue", + "ofType" -> null)))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "type", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "isDeprecated", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "deprecationReason", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__InputValue", - "description" → "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", - "fields" → Vector( - Map( - "name" → "name", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - 
"deprecationReason" → null), - Map( - "name" → "type", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "defaultValue", - "description" → "A GraphQL-formatted string representing the default value for this input value.", - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "__InputValue", + "description" -> "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", + "fields" -> Vector( + Map( + "name" -> "name", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "type", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "defaultValue", + "description" -> "A GraphQL-formatted string representing the default value for this input value.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + 
"isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__Schema", - "description" → "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", - "fields" → Vector( - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "types", - "description" → "A list of all types supported by this server.", - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null)))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "queryType", - "description" → "The type that query operations will be rooted at.", - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "mutationType", - "description" → "If this server supports mutation, the type that mutation operations will be rooted at.", - "args" → Vector.empty, - "type" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "subscriptionType", - "description" → "If this server support subscription, the type that subscription operations will be rooted at.", - "args" → Vector.empty, - "type" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null), - "isDeprecated" 
→ false, - "deprecationReason" → null), - Map( - "name" → "directives", - "description" → "A list of all directives supported by this server.", - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Directive", - "ofType" → null)))), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "OBJECT", + "name" -> "__Schema", + "description" -> "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "fields" -> Vector( + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "types", + "description" -> "A list of all types supported by this server.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null)))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "queryType", + "description" -> "The type that query operations will be rooted at.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "mutationType", + "description" -> "If this server supports mutation, the type that mutation 
operations will be rooted at.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "subscriptionType", + "description" -> "If this server support subscription, the type that subscription operations will be rooted at.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "directives", + "description" -> "A list of all directives supported by this server.", + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Directive", + "ofType" -> null)))), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "OBJECT", - "name" → "__Type", - "description" → "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. 
List and NonNull types compose other types.", - "fields" → Vector( - Map( - "name" → "kind", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "ENUM", - "name" → "__TypeKind", - "ofType" → null)), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "name", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "description", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "fields", - "description" → null, - "args" → Vector( + "kind" -> "OBJECT", + "name" -> "__Type", + "description" -> "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. 
List and NonNull types compose other types.", + "fields" -> Vector( + Map( + "name" -> "kind", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "ENUM", + "name" -> "__TypeKind", + "ofType" -> null)), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "name", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "description", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "fields", + "description" -> null, + "args" -> Vector( Map( - "name" → "includeDeprecated", - "description" → null, - "type" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null), - "defaultValue" → "false")), - "type" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Field", - "ofType" → null))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "interfaces", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "possibleTypes", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "enumValues", - 
"description" → null, - "args" → Vector( + "name" -> "includeDeprecated", + "description" -> null, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null), + "defaultValue" -> "false")), + "type" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Field", + "ofType" -> null))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "interfaces", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "possibleTypes", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "enumValues", + "description" -> null, + "args" -> Vector( Map( - "name" → "includeDeprecated", - "description" → null, - "type" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null), - "defaultValue" → "false")), - "type" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__EnumValue", - "ofType" → null))), - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "inputFields", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "LIST", - "name" → null, - "ofType" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "OBJECT", - "name" → "__InputValue", - "ofType" → null))), - "isDeprecated" → false, - "deprecationReason" → 
null), - Map( - "name" → "ofType", - "description" → null, - "args" → Vector.empty, - "type" → Map( - "kind" → "OBJECT", - "name" → "__Type", - "ofType" → null), - "isDeprecated" → false, - "deprecationReason" → null)), - "inputFields" → null, - "interfaces" → Vector.empty, - "enumValues" → null, - "possibleTypes" → null), + "name" -> "includeDeprecated", + "description" -> null, + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null), + "defaultValue" -> "false")), + "type" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__EnumValue", + "ofType" -> null))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "inputFields", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "OBJECT", + "name" -> "__InputValue", + "ofType" -> null))), + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "ofType", + "description" -> null, + "args" -> Vector.empty, + "type" -> Map( + "kind" -> "OBJECT", + "name" -> "__Type", + "ofType" -> null), + "isDeprecated" -> false, + "deprecationReason" -> null)), + "inputFields" -> null, + "interfaces" -> Vector.empty, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "ENUM", - "name" → "__TypeKind", - "description" → "An enum describing what kind of type a given `__Type` is.", - "fields" → null, - "inputFields" → null, - "interfaces" → null, - "enumValues" → Vector( - Map( - "name" → "SCALAR", - "description" → "Indicates this type is a scalar.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "OBJECT", - "description" → "Indicates this type is an object. 
`fields` and `interfaces` are valid fields.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INTERFACE", - "description" → "Indicates this type is an interface. `fields` and `possibleTypes` are valid fields.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "UNION", - "description" → "Indicates this type is a union. `possibleTypes` is a valid field.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "ENUM", - "description" → "Indicates this type is an enum. `enumValues` is a valid field.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "INPUT_OBJECT", - "description" → "Indicates this type is an input object. `inputFields` is a valid field.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "LIST", - "description" → "Indicates this type is a list. `ofType` is a valid field.", - "isDeprecated" → false, - "deprecationReason" → null), - Map( - "name" → "NON_NULL", - "description" → "Indicates this type is a non-null. `ofType` is a valid field.", - "isDeprecated" → false, - "deprecationReason" → null)), - "possibleTypes" → null), + "kind" -> "ENUM", + "name" -> "__TypeKind", + "description" -> "An enum describing what kind of type a given `__Type` is.", + "fields" -> null, + "inputFields" -> null, + "interfaces" -> null, + "enumValues" -> Vector( + Map( + "name" -> "SCALAR", + "description" -> "Indicates this type is a scalar.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "OBJECT", + "description" -> "Indicates this type is an object. `fields` and `interfaces` are valid fields.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "INTERFACE", + "description" -> "Indicates this type is an interface. 
`fields` and `possibleTypes` are valid fields.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "UNION", + "description" -> "Indicates this type is a union. `possibleTypes` is a valid field.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "ENUM", + "description" -> "Indicates this type is an enum. `enumValues` is a valid field.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "INPUT_OBJECT", + "description" -> "Indicates this type is an input object. `inputFields` is a valid field.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "LIST", + "description" -> "Indicates this type is a list. `ofType` is a valid field.", + "isDeprecated" -> false, + "deprecationReason" -> null), + Map( + "name" -> "NON_NULL", + "description" -> "Indicates this type is a non-null. `ofType` is a valid field.", + "isDeprecated" -> false, + "deprecationReason" -> null)), + "possibleTypes" -> null), Map( - "kind" → "SCALAR", - "name" → "Boolean", - "description" → "The `Boolean` scalar type represents `true` or `false`.", - "fields" → null, - "inputFields" → null, - "interfaces" → null, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "SCALAR", + "name" -> "Boolean", + "description" -> "The `Boolean` scalar type represents `true` or `false`.", + "fields" -> null, + "inputFields" -> null, + "interfaces" -> null, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "SCALAR", - "name" → "Int", - "description" → "The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.", - "fields" → null, - "inputFields" → null, - "interfaces" → null, - "enumValues" → null, - "possibleTypes" → null), + "kind" -> "SCALAR", + "name" -> "Int", + "description" -> "The `Int` scalar type represents non-fractional signed whole numeric values. 
Int can represent values between -(2^31) and 2^31 - 1.", + "fields" -> null, + "inputFields" -> null, + "interfaces" -> null, + "enumValues" -> null, + "possibleTypes" -> null), Map( - "kind" → "SCALAR", - "name" → "String", - "description" → "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", - "fields" → null, - "inputFields" → null, - "interfaces" → null, - "enumValues" → null, - "possibleTypes" → null)), - "directives" → Vector( + "kind" -> "SCALAR", + "name" -> "String", + "description" -> "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", + "fields" -> null, + "inputFields" -> null, + "interfaces" -> null, + "enumValues" -> null, + "possibleTypes" -> null)), + "directives" -> Vector( Map( - "name" → "include", - "description" → "Directs the executor to include this field or fragment only when the `if` argument is true.", - "locations" → Vector( + "name" -> "include", + "description" -> "Directs the executor to include this field or fragment only when the `if` argument is true.", + "locations" -> Vector( "FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"), - "args" → Vector( - Map( - "name" → "if", - "description" → "Included when true.", - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null)), - "defaultValue" → null))), + "args" -> Vector( + Map( + "name" -> "if", + "description" -> "Included when true.", + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null)), + "defaultValue" -> null))), Map( - "name" → "skip", - "description" → "Directs the executor to skip this field or fragment when the `if` argument is true.", - "locations" → 
Vector( + "name" -> "skip", + "description" -> "Directs the executor to skip this field or fragment when the `if` argument is true.", + "locations" -> Vector( "FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"), - "args" → Vector( - Map( - "name" → "if", - "description" → "Included when true.", - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "Boolean", - "ofType" → null)), - "defaultValue" → null))), + "args" -> Vector( + Map( + "name" -> "if", + "description" -> "Included when true.", + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "Boolean", + "ofType" -> null)), + "defaultValue" -> null))), Map( - "name" → "deprecated", - "description" → "Marks an element of a GraphQL schema as no longer supported.", - "locations" → Vector( + "name" -> "deprecated", + "description" -> "Marks an element of a GraphQL schema as no longer supported.", + "locations" -> Vector( "ENUM_VALUE", "FIELD_DEFINITION"), - "args" → Vector( - Map( - "name" → "reason", - "description" → "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. Formatted in [Markdown](https://daringfireball.net/projects/markdown/).", - "type" → Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null), - "defaultValue" → "\"No longer supported\"")))), - "description" → null)))) + "args" -> Vector( + Map( + "name" -> "reason", + "description" -> "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. 
Formatted in [Markdown](https://daringfireball.net/projects/markdown/).", + "type" -> Map( + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null), + "defaultValue" -> "\"No longer supported\"")))), + "description" -> null)))) } "introspects on input object" in { @@ -796,7 +796,7 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport val testType = ObjectType("TestType", fields[Unit, Unit]( Field("field", OptionType(StringType), arguments = Argument("complex", OptionInputType(inputType)) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val schema = Schema(testType) @@ -838,94 +838,94 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport val BuiltInTypes = List( Map( - "kind" → "OBJECT", - "name" → "TestType", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "TestType", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__Directive", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__Directive", + "inputFields" -> null ), Map( - "kind" → "ENUM", - "name" → "__DirectiveLocation", - "inputFields" → null + "kind" -> "ENUM", + "name" -> "__DirectiveLocation", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__EnumValue", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__EnumValue", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__Field", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__Field", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__InputValue", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__InputValue", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__Schema", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__Schema", + "inputFields" -> null ), Map( - "kind" → "OBJECT", - "name" → "__Type", - "inputFields" → null + "kind" -> "OBJECT", + "name" -> "__Type", + "inputFields" -> null ), Map( - "kind" → "ENUM", - "name" → "__TypeKind", - "inputFields" 
→ null + "kind" -> "ENUM", + "name" -> "__TypeKind", + "inputFields" -> null ), Map( - "kind" → "SCALAR", - "name" → "Boolean", - "inputFields" → null + "kind" -> "SCALAR", + "name" -> "Boolean", + "inputFields" -> null ), Map( - "kind" → "SCALAR", - "name" → "String", - "inputFields" → null + "kind" -> "SCALAR", + "name" -> "String", + "inputFields" -> null ) ) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__schema" → Map( - "types" → (List( + "data" -> Map( + "__schema" -> Map( + "types" -> (List( Map( - "kind" → "INPUT_OBJECT", - "name" → "TestInputObject", - "inputFields" → List( + "kind" -> "INPUT_OBJECT", + "name" -> "TestInputObject", + "inputFields" -> List( Map( - "name" → "a", - "type" → + "name" -> "a", + "type" -> Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null ), - "defaultValue" → "\"foo\"" + "defaultValue" -> "\"foo\"" ), Map( - "name" → "b", - "type" → + "name" -> "b", + "type" -> Map( - "kind" → "LIST", - "name" → null, - "ofType" → + "kind" -> "LIST", + "name" -> null, + "ofType" -> Map( - "kind" → "SCALAR", - "name" → "String", - "ofType" → null + "kind" -> "SCALAR", + "name" -> "String", + "ofType" -> null ) ), - "defaultValue" → null + "defaultValue" -> null ) ) ) @@ -937,7 +937,7 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport "supports the __type root field" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("testField", OptionType(StringType), resolve = _ ⇒ None) + Field("testField", OptionType(StringType), resolve = _ => None) )) val schema = Schema(testType) @@ -953,9 +953,9 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport ) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__type" → Map( - "name" → "TestType" + "data" -> Map( + "__type" -> Map( + "name" -> "TestType" ) ) )) @@ -963,8 +963,8 @@ class IntrospectionSpec 
extends WordSpec with Matchers with FutureResultSupport "identifies deprecated fields" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) @@ -985,19 +985,19 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport ) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__type" → Map( - "name" → "TestType", - "fields" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "TestType", + "fields" -> List( Map( - "name" → "nonDeprecated", - "isDeprecated" → false, - "deprecationReason" → null + "name" -> "nonDeprecated", + "isDeprecated" -> false, + "deprecationReason" -> null ), Map( - "name" → "deprecated", - "isDeprecated" → true, - "deprecationReason" → "Removed in 1.0" + "name" -> "deprecated", + "isDeprecated" -> true, + "deprecationReason" -> "Removed in 1.0" ) ) ) @@ -1007,8 +1007,8 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport "respects the includeDeprecated parameter for fields" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("nonDeprecated", OptionType(StringType), resolve = _ ⇒ None), - Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ ⇒ None) + Field("nonDeprecated", OptionType(StringType), resolve = _ => None), + Field("deprecated", OptionType(StringType), deprecationReason = Some("Removed in 1.0"), resolve = _ => None) )) val schema = Schema(testType) @@ -1033,25 +1033,25 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport ) Executor.execute(schema, query).await should be 
(Map( - "data" → Map( - "__type" → Map( - "name" → "TestType", - "trueFields" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "TestType", + "trueFields" -> List( Map( - "name" → "nonDeprecated" + "name" -> "nonDeprecated" ), Map( - "name" → "deprecated" + "name" -> "deprecated" ) ), - "falseFields" → List( + "falseFields" -> List( Map( - "name" → "nonDeprecated" + "name" -> "nonDeprecated" ) ), - "omittedFields" → List( + "omittedFields" -> List( Map( - "name" → "nonDeprecated" + "name" -> "nonDeprecated" ) ) ) @@ -1066,7 +1066,7 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport EnumValue("ALSONONDEPRECATED", value = 3))) val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("testEnum", OptionType(testEnum), resolve = _ ⇒ None) + Field("testEnum", OptionType(testEnum), resolve = _ => None) )) val schema = Schema(testType) @@ -1087,24 +1087,24 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport ) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__type" → Map( - "name" → "TestEnum", - "enumValues" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "TestEnum", + "enumValues" -> List( Map( - "name" → "NONDEPRECATED", - "isDeprecated" → false, - "deprecationReason" → null + "name" -> "NONDEPRECATED", + "isDeprecated" -> false, + "deprecationReason" -> null ), Map( - "name" → "DEPRECATED", - "isDeprecated" → true, - "deprecationReason" → "Removed in 1.0" + "name" -> "DEPRECATED", + "isDeprecated" -> true, + "deprecationReason" -> "Removed in 1.0" ), Map( - "name" → "ALSONONDEPRECATED", - "isDeprecated" → false, - "deprecationReason" → null + "name" -> "ALSONONDEPRECATED", + "isDeprecated" -> false, + "deprecationReason" -> null ) ) ) @@ -1119,7 +1119,7 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport EnumValue("ALSONONDEPRECATED", value = 3))) val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("testEnum", 
OptionType(testEnum), resolve = _ ⇒ None) + Field("testEnum", OptionType(testEnum), resolve = _ => None) )) val schema = Schema(testType) @@ -1144,34 +1144,34 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport ) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__type" → Map( - "name" → "TestEnum", - "trueValues" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "TestEnum", + "trueValues" -> List( Map( - "name" → "NONDEPRECATED" + "name" -> "NONDEPRECATED" ), Map( - "name" → "DEPRECATED" + "name" -> "DEPRECATED" ), Map( - "name" → "ALSONONDEPRECATED" + "name" -> "ALSONONDEPRECATED" ) ), - "falseValues" → List( + "falseValues" -> List( Map( - "name" → "NONDEPRECATED" + "name" -> "NONDEPRECATED" ), Map( - "name" → "ALSONONDEPRECATED" + "name" -> "ALSONONDEPRECATED" ) ), - "omittedValues" → List( + "omittedValues" -> List( Map( - "name" → "NONDEPRECATED" + "name" -> "NONDEPRECATED" ), Map( - "name" → "ALSONONDEPRECATED" + "name" -> "ALSONONDEPRECATED" ) ) ) @@ -1181,7 +1181,7 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport "fails as expected on the __type root field without an arg" in { val testType = ObjectType("TestType", fields[Unit, Unit]( - Field("testField", OptionType(StringType), resolve = _ ⇒ None) + Field("testField", OptionType(StringType), resolve = _ => None) )) val schema = Schema(testType) @@ -1198,13 +1198,13 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport val result = Executor.execute(schema, query, queryValidator = QueryValidator.empty).await.asInstanceOf[Map[String, Any]] - result("data") should be (Map("__type" → null)) + result("data") should be (Map("__type" -> null)) result("errors").asInstanceOf[Seq[Map[String, Any]]](0)("message").asInstanceOf[String] should include ( "Null value was provided for the NotNull Type 'String!' 
at path 'name'.") } "exposes descriptions on types and fields" in { - val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1)))) + val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1)))) val Success(query) = QueryParser.parse( """ @@ -1221,33 +1221,33 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport """) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "schemaType" → Map( - "name" → "__Schema", - "description" → "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", - "fields" → Vector( + "data" -> Map( + "schemaType" -> Map( + "name" -> "__Schema", + "description" -> "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "fields" -> Vector( Map( - "name" → "description", - "description" → null), + "name" -> "description", + "description" -> null), Map( - "name" → "types", - "description" → "A list of all types supported by this server."), + "name" -> "types", + "description" -> "A list of all types supported by this server."), Map( - "name" → "queryType", - "description" → "The type that query operations will be rooted at."), + "name" -> "queryType", + "description" -> "The type that query operations will be rooted at."), Map( - "name" → "mutationType", - "description" → "If this server supports mutation, the type that mutation operations will be rooted at."), + "name" -> "mutationType", + "description" -> "If this server supports mutation, the type that mutation operations will be rooted at."), Map( - "name" → "subscriptionType", - "description" → "If this server support subscription, the type that subscription 
operations will be rooted at."), + "name" -> "subscriptionType", + "description" -> "If this server support subscription, the type that subscription operations will be rooted at."), Map( - "name" → "directives", - "description" → "A list of all directives supported by this server.")))))) + "name" -> "directives", + "description" -> "A list of all directives supported by this server.")))))) } "exposes description on schema" in { - val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1))), + val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1))), description = Some("test schema")) val Success(query) = QueryParser.parse( @@ -1260,13 +1260,13 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport """) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "__schema" → Map( - "description" → "test schema")))) + "data" -> Map( + "__schema" -> Map( + "description" -> "test schema")))) } "exposes descriptions on enums" in { - val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1)))) + val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1)))) val Success(query) = QueryParser.parse( """ @@ -1283,56 +1283,56 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport """) Executor.execute(schema, query).await should be (Map( - "data" → Map( - "typeKindType" → Map( - "name" → "__TypeKind", - "description" → "An enum describing what kind of type a given `__Type` is.", - "enumValues" → List( + "data" -> Map( + "typeKindType" -> Map( + "name" -> "__TypeKind", + "description" -> "An enum describing what kind of type a given `__Type` is.", + "enumValues" -> List( Map( - "description" → "Indicates this type is a scalar.", - "name" → "SCALAR" + "description" -> "Indicates this type is a scalar.", + "name" -> "SCALAR" ), Map( - 
"description" → ( + "description" -> ( "Indicates this type is an object. " + "`fields` and `interfaces` are valid fields."), - "name" → "OBJECT" + "name" -> "OBJECT" ), Map( - "description" → ( + "description" -> ( "Indicates this type is an interface. " + "`fields` and `possibleTypes` are valid fields."), - "name" → "INTERFACE" + "name" -> "INTERFACE" ), Map( - "description" → ( + "description" -> ( "Indicates this type is a union. " + "`possibleTypes` is a valid field."), - "name" → "UNION" + "name" -> "UNION" ), Map( - "description" → ( + "description" -> ( "Indicates this type is an enum. " + "`enumValues` is a valid field."), - "name" → "ENUM" + "name" -> "ENUM" ), Map( - "description" → ( + "description" -> ( "Indicates this type is an input object. " + "`inputFields` is a valid field."), - "name" → "INPUT_OBJECT" + "name" -> "INPUT_OBJECT" ), Map( - "description" → ( + "description" -> ( "Indicates this type is a list. " + "`ofType` is a valid field."), - "name" → "LIST" + "name" -> "LIST" ), Map( - "description" → ( + "description" -> ( "Indicates this type is a non-null. 
" + "`ofType` is a valid field."), - "name" → "NON_NULL" + "name" -> "NON_NULL" ) ) ) @@ -1340,4 +1340,4 @@ class IntrospectionSpec extends WordSpec with Matchers with FutureResultSupport )) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/macros/derive/DeriveInputObjectTypeMacroSpec.scala b/src/test/scala/sangria/macros/derive/DeriveInputObjectTypeMacroSpec.scala index 23818bba..ca943b98 100644 --- a/src/test/scala/sangria/macros/derive/DeriveInputObjectTypeMacroSpec.scala +++ b/src/test/scala/sangria/macros/derive/DeriveInputObjectTypeMacroSpec.scala @@ -80,10 +80,10 @@ class DeriveInputObjectTypeMacroSpec extends WordSpec with Matchers with FutureR "expose case class fields" in { val tpe = deriveInputObjectType[TestInputObj]() - tpe.fields.sortBy(_.name).map(f ⇒ f.name → f.fieldType) should be (List( - "excluded" → OptionInputType(ListInputType(OptionInputType(IntType))), - "id" → StringType, - "list" → ListInputType(StringType))) + tpe.fields.sortBy(_.name).map(f => f.name -> f.fieldType) should be (List( + "excluded" -> OptionInputType(ListInputType(OptionInputType(IntType))), + "id" -> StringType, + "list" -> ListInputType(StringType))) } "validate known field names" in { @@ -121,9 +121,9 @@ class DeriveInputObjectTypeMacroSpec extends WordSpec with Matchers with FutureR contain("ID") and contain("MYLIST")) - val transformer2 = (s: String) ⇒ s.zipWithIndex.map { - case (c, i) if i % 2 == 0 ⇒ c.toLower - case (c, _) ⇒ c.toUpper + val transformer2 = (s: String) => s.zipWithIndex.map { + case (c, i) if i % 2 == 0 => c.toLower + case (c, _) => c.toUpper }.mkString("") val tpe2 = deriveInputObjectType[TestInputObjAnnotated]( @@ -211,7 +211,7 @@ class DeriveInputObjectTypeMacroSpec extends WordSpec with Matchers with FutureR graphql"""{foo(a: {id: 21, b: {name: "it's b", a: {id: 34}, b: {name: "another", a: {id: 56}}}})}""" Executor.execute(schema, query, root = new Query).await should be ( - JsObject("data" → JsObject("foo" → + 
JsObject("data" -> JsObject("foo" -> JsString("A(21,Some(B(it's b,A(34,None),Some(B(another,A(56,None),None)))))")))) } @@ -234,14 +234,14 @@ class DeriveInputObjectTypeMacroSpec extends WordSpec with Matchers with FutureR implicit lazy val TestNestedType = deriveInputObjectType[TestNested]() implicit lazy val TestDefaultsType = deriveInputObjectType[TestDefaults]() - TestDeeperType.fields.sortBy(_.name).map(f ⇒ f.name → f.fieldType) should be (List( - "foo" → OptionInputType(IntType), - "s" → StringType)) + TestDeeperType.fields.sortBy(_.name).map(f => f.name -> f.fieldType) should be (List( + "foo" -> OptionInputType(IntType), + "s" -> StringType)) - TestNestedType.fields.sortBy(_.name).map(f ⇒ f.name → f.fieldType) should be (List( - "deeper" → OptionInputType(ListInputType(TestDeeperType)), - "name" → OptionInputType(ListInputType(IntType)), - "stub" → TestDeeperType)) + TestNestedType.fields.sortBy(_.name).map(f => f.name -> f.fieldType) should be (List( + "deeper" -> OptionInputType(ListInputType(TestDeeperType)), + "name" -> OptionInputType(ListInputType(IntType)), + "stub" -> TestDeeperType)) val QueryType = deriveObjectType[Unit, Query]() @@ -250,7 +250,7 @@ class DeriveInputObjectTypeMacroSpec extends WordSpec with Matchers with FutureR val query = graphql"""{foo(a: {nested: {stub: {s: "foo"}}})}""" Executor.execute(schema, query, root = new Query).await should be ( - JsObject("data" → JsObject("foo" → + JsObject("data" -> JsObject("foo" -> JsString("TestDefaults(fgh,None,324,TestNested(TestDeeper(foo,123),List(3, 4, 5),Some(List(TestDeeper(aa,1)))),Some(List(TestNested(TestDeeper(ee,1),List(1),Some(List(TestDeeper(aa,1)))), TestNested(TestDeeper(ff,1),List(1),Some(List(TestDeeper(aa,1)))))))")))) val intro = IntrospectionParser.parse(Executor.execute(schema, sangria.introspection.introspectionQuery, root = new Query).await) diff --git a/src/test/scala/sangria/macros/derive/DeriveObjectTypeMacroSpec.scala 
b/src/test/scala/sangria/macros/derive/DeriveObjectTypeMacroSpec.scala index f3cc05fa..feba37c1 100644 --- a/src/test/scala/sangria/macros/derive/DeriveObjectTypeMacroSpec.scala +++ b/src/test/scala/sangria/macros/derive/DeriveObjectTypeMacroSpec.scala @@ -106,7 +106,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult """deriveObjectType[Unit, TestSubject](RenameField("id1", "foo"))""" shouldNot compile """deriveObjectType[Unit, TestSubject](FieldTags("id1", CachedTag))""" shouldNot compile """deriveObjectType[Unit, TestSubject](DeprecateField("id1", "test"))""" shouldNot compile - """deriveObjectType[Unit, TestSubject](FieldComplexity("id1", (_, _, _) ⇒ 1.0))""" shouldNot compile + """deriveObjectType[Unit, TestSubject](FieldComplexity("id1", (_, _, _) => 1.0))""" shouldNot compile """deriveObjectType[Unit, TestSubjectAnnotated](ExcludeFields("id", "list", "excluded"))""" shouldNot compile } @@ -134,7 +134,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult IncludeFields("id"), AddFields( Field("foo", ListType(StringType), resolve = _.value.list), - Field("bar", BooleanType, resolve = _ ⇒ true))) + Field("bar", BooleanType, resolve = _ => true))) tpe.fields should have size 3 @@ -151,7 +151,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult "allow to override fields" in { val tpe = deriveObjectType[Unit, TestSubject]( ReplaceField("id", Field("id", ListType(StringType), resolve = _.value.list)), - ReplaceField("list", Field("bar", BooleanType, resolve = _ ⇒ true))) + ReplaceField("list", Field("bar", BooleanType, resolve = _ => true))) tpe.fields should have size 3 @@ -167,7 +167,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult "allow to set field complexity with config" in { val tpe = deriveObjectType[Unit, TestSubject]( - FieldComplexity("id", (_, _, child) ⇒ child * 123.0)) + FieldComplexity("id", (_, _, child) => child * 
123.0)) tpe.fields(0).complexity.get((), Args.empty, 2D) should be (246.0) } @@ -210,9 +210,9 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult contain("ID") and contain("MYLIST")) - val transformer2 = (s: String) ⇒ s.zipWithIndex.map { - case (c, i) if i % 2 == 0 ⇒ c.toLower - case (c, _) ⇒ c.toUpper + val transformer2 = (s: String) => s.zipWithIndex.map { + case (c, i) if i % 2 == 0 => c.toLower + case (c, _) => c.toUpper }.mkString("") val tpe2 = deriveObjectType[Unit, TestSubjectAnnotated](TransformFieldNames(transformer2)) @@ -324,15 +324,15 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult val schema = Schema(ArticleType) Executor.execute(schema, query, root = testArticle).await should be (Map( - "data" → Map( - "title" → "My First Article", - "text" → "foo bar", - "myTags" → null, - "fruit" → "JustApple", - "comments" → List( - Map("author" → "bob", "text" → null, "color" → "NormalRed"), + "data" -> Map( + "title" -> "My First Article", + "text" -> "foo bar", + "myTags" -> null, + "fruit" -> "JustApple", + "comments" -> List( + Map("author" -> "bob", "text" -> null, "color" -> "NormalRed"), null, - Map("author" → "jane", "text" → "yay!", "color" → "NormalRed"))))) + Map("author" -> "jane", "text" -> "yay!", "color" -> "NormalRed"))))) import sangria.marshalling.queryAst._ import sangria.parser.DeliveryScheme.Throw @@ -379,7 +379,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult graphql"{id, b {name, a {id}, b {name}} }" Executor.execute(schema, query, root = A(1, B("foo", A(2, null), B("bar", null, null)))).await should be (Map( - "data" → Map("id" → 1, "b" → Map("name" → "foo", "a" → Map("id" → 2), "b" → Map("name" → "bar"))))) + "data" -> Map("id" -> 1, "b" -> Map("name" -> "foo", "a" -> Map("id" -> 2), "b" -> Map("name" -> "bar"))))) } "use companion object to resolve derived types" in { @@ -390,7 +390,7 @@ class DeriveObjectTypeMacroSpec extends 
WordSpec with Matchers with FutureResult val query = graphql"{b {myC {e, e1}}}" Executor.execute(schema, query, root = CompanionA(CompanionB(CompanionC(CompanionEnum1, AnotherEnum.FOO)))).await should be (Map( - "data" → Map("b" → Map("myC" → Map("e" → "first", "e1" → "FOO"))))) + "data" -> Map("b" -> Map("myC" -> Map("e" -> "first", "e1" -> "FOO"))))) } "support `Future`, `Try`, `Defer` and `Action` return types" in { @@ -407,11 +407,11 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult val tpe = deriveObjectType[Unit, MyTest]() - tpe.fields.sortBy(_.name).map(f ⇒ f.name → f.fieldType) should be (List( - "actionVal" → OptionType(ListType(IntType)), - "deferVal" → OptionType(ListType(IntType)), - "futureVal" → ListType(IntType), - "tryVal" → OptionType(ListType(IntType)) + tpe.fields.sortBy(_.name).map(f => f.name -> f.fieldType) should be (List( + "actionVal" -> OptionType(ListType(IntType)), + "deferVal" -> OptionType(ListType(IntType)), + "futureVal" -> ListType(IntType), + "tryVal" -> OptionType(ListType(IntType)) )) } @@ -468,11 +468,11 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult """ Executor.execute(schema, query, Ctx(987, new FooBar)).await should be ( - JsObject("data" → JsObject( - "foo" → JsString("id = 123, songs = a,b, cc = Red, pet = Pet(xxx,Some(322)), ctx = 987"), - "foo1" → JsString("id = 123, songs = a,b, cc = Red, pet = Pet(mypet,Some(156)), ctx = 987"), - "opt" → JsString("str = None, color = None, pet = None"), - "opt1" → JsString("str = Some(test), color = Some(Red), pet = Some(Pet(anotherPet,Some(321)))")))) + JsObject("data" -> JsObject( + "foo" -> JsString("id = 123, songs = a,b, cc = Red, pet = Pet(xxx,Some(322)), ctx = 987"), + "foo1" -> JsString("id = 123, songs = a,b, cc = Red, pet = Pet(mypet,Some(156)), ctx = 987"), + "opt" -> JsString("str = None, color = None, pet = None"), + "opt1" -> JsString("str = Some(test), color = Some(Red), pet = 
Some(Pet(anotherPet,Some(321)))")))) import sangria.parser.DeliveryScheme.Throw @@ -528,7 +528,7 @@ class DeriveObjectTypeMacroSpec extends WordSpec with Matchers with FutureResult MethodArgumentDescription("hello", "id", "`id`"), MethodArgumentDescription("hello", "songs", "`songs`"), MethodArgumentRename("opt", "str", "description"), - MethodArgumentsDescription("opt", "str" → "Optional description", "color" -> "a color"), + MethodArgumentsDescription("opt", "str" -> "Optional description", "color" -> "a color"), MethodArgumentDefault("hello", "songs", "My favorite song" :: Nil), MethodArgumentDefault("opt", "pet", """{"name": "Bell", "size": 3}""".parseJson), MethodArgument("hello", "pet", "`pet`", Pet("Octocat", None))) diff --git a/src/test/scala/sangria/marshalling/EnumInputTypeSpec.scala b/src/test/scala/sangria/marshalling/EnumInputTypeSpec.scala index 88b67e75..eed4b7b4 100644 --- a/src/test/scala/sangria/marshalling/EnumInputTypeSpec.scala +++ b/src/test/scala/sangria/marshalling/EnumInputTypeSpec.scala @@ -37,7 +37,7 @@ class EnumInputTypeSpec extends WordSpec with Matchers { "valueFromComplex", sangria.schema.StringType, arguments = List(complexArgument), - resolve = { ctx ⇒ + resolve = { ctx => ctx.arg(complexArgument).getFields("enumValue").toString() } ) @@ -48,7 +48,7 @@ class EnumInputTypeSpec extends WordSpec with Matchers { "valueFromEnum", sangria.schema.StringType, arguments = List(enumArgument), - resolve = { ctx ⇒ + resolve = { ctx => JsString(ctx.arg(enumArgument)).value } ) @@ -90,7 +90,7 @@ class EnumInputTypeSpec extends WordSpec with Matchers { | } """.stripMargin - runQuery(query) should be (Map("data" → Map("valueFromComplex" → "TOP_VALUE"))) + runQuery(query) should be (Map("data" -> Map("valueFromComplex" -> "TOP_VALUE"))) } "correctly unmarshals an enum passed as an argument" in { @@ -101,6 +101,6 @@ class EnumInputTypeSpec extends WordSpec with Matchers { | } """.stripMargin - runQuery(query) should be (Map("data" → 
Map("valueFromEnum" → "TOP_VALUE"))) + runQuery(query) should be (Map("data" -> Map("valueFromEnum" -> "TOP_VALUE"))) } } diff --git a/src/test/scala/sangria/marshalling/FromInputSpec.scala b/src/test/scala/sangria/marshalling/FromInputSpec.scala index 2e805289..7e693920 100644 --- a/src/test/scala/sangria/marshalling/FromInputSpec.scala +++ b/src/test/scala/sangria/marshalling/FromInputSpec.scala @@ -53,7 +53,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("optListOpt", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Seq[Option[JsValue]] = ctx.arg(arg) "" + value.map(_.map(_.compactPrint)) @@ -87,7 +87,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("nn", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Article = ctx.arg(arg) "" + value @@ -102,7 +102,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("optListOpt", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Seq[Option[Article]] = ctx.arg(arg) "" + value @@ -123,7 +123,7 @@ class FromInputSpec extends WordSpec with Matchers { val ad = node.asInstanceOf[Map[String, Any]] def readComments(data: Seq[Option[Map[String, Any]]]) = { - data.toVector.map(_.map(cd ⇒ { + data.toVector.map(_.map(cd => { Comment( author = cd("author").asInstanceOf[Option[String]].getOrElse("manual default"), text = cd.get("text").flatMap(_.asInstanceOf[Option[String]])) @@ -145,7 +145,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("nn", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Article = ctx.arg(arg) "" + value @@ -156,7 +156,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("opt", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Option[Article] = ctx.arg(arg) "" + value @@ -168,7 +168,7 @@ class 
FromInputSpec extends WordSpec with Matchers { Field("optDef", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Article = ctx.arg(arg) "" + value @@ -179,7 +179,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("optList", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Option[Seq[Article]] = ctx.arg(arg) "" + value @@ -193,7 +193,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("optListDef", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Seq[Article] = ctx.arg(arg) "" + value @@ -204,7 +204,7 @@ class FromInputSpec extends WordSpec with Matchers { Field("optListOpt", OptionType(StringType), arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val value: Option[Seq[Option[Article]]] = ctx.arg(arg) "" + value @@ -229,8 +229,8 @@ class FromInputSpec extends WordSpec with Matchers { }) } """, - Map("data" → Map( - "nn" → Article("First!", None, None, + Map("data" -> Map( + "nn" -> Article("First!", None, None, Vector(None, Some(Comment("anonymous", Some("Hello wold"))))).toString)) ) @@ -244,10 +244,10 @@ class FromInputSpec extends WordSpec with Matchers { o3: opt(article: {title: "foo", text: "bar", tags: null, comments: []}) } """, - Map("data" → Map( - "o1" → None.toString, - "o2" → None.toString, - "o3" → Some(Article("foo", Some("bar"), None, Vector.empty)).toString)) + Map("data" -> Map( + "o1" -> None.toString, + "o2" -> None.toString, + "o3" -> Some(Article("foo", Some("bar"), None, Vector.empty)).toString)) ) "deserialize manually with coerced scala result marshaller (single optional value with default)" in check( @@ -259,9 +259,9 @@ class FromInputSpec extends WordSpec with Matchers { od2: optDef(article: {title: "foo", text: "bar", tags: null, comments: []}) } """, - Map("data" → Map( - "od1" → Article("def", None, None, Vector(Some(Comment("aaa", None)), 
Some(Comment("bbb", Some("ccc"))), None)).toString, - "od2" → Article("foo", Some("bar"), None, Vector.empty).toString)) + Map("data" -> Map( + "od1" -> Article("def", None, None, Vector(Some(Comment("aaa", None)), Some(Comment("bbb", Some("ccc"))), None)).toString, + "od2" -> Article("foo", Some("bar"), None, Vector.empty).toString)) ) "deserialize manually with coerced scala result marshaller (optional list with not-null values)" in check( @@ -275,15 +275,15 @@ class FromInputSpec extends WordSpec with Matchers { ol4: optList(articles: $var2) } """, - Map("data" → Map( - "ol1" → None.toString, - "ol2" → Some(Vector( + Map("data" -> Map( + "ol1" -> None.toString, + "ol2" -> Some(Vector( Article("first", None, None, Vector(None)), Article("second", None, None, Vector(None, None)))).toString, - "ol3" → Some(Vector( + "ol3" -> Some(Vector( Article("foo", Some("bar"), Some(Vector("a", "b")), Vector( None, Some(Comment("anonymous", None)), Some(Comment("anonymous", Some("commnet3"))))))).toString, - "ol4" → Some(Vector( + "ol4" -> Some(Vector( Article("bar", None, None, Vector(None)), Article("baz", None, None, Vector.empty))).toString)), """ @@ -315,11 +315,11 @@ class FromInputSpec extends WordSpec with Matchers { old2: optListDef(articles: [{title: "first", comments: [null]}, {title: "second", comments: [null, null]}]) } """, - Map("data" → Map( - "old1" → Vector( + Map("data" -> Map( + "old1" -> Vector( Article("def1", None, Some(Vector("c", "d")), Vector(Some(Comment("c1", None)), None)), Article("def2", Some("some text"), None, Vector.empty)).toString, - "old2" → Vector( + "old2" -> Vector( Article("first", None, None, Vector(None)), Article("second", None, None, Vector(None, None))).toString)) ) @@ -335,17 +335,17 @@ class FromInputSpec extends WordSpec with Matchers { olo4: optListOpt(articles: $var2) } """, - Map("data" → Map( - "olo1" → None.toString, - "olo2" → Some(Vector( + Map("data" -> Map( + "olo1" -> None.toString, + "olo2" -> Some(Vector( 
Some(Article("first", None, None, Vector(None))), None, Some(Article("second", None, None, Vector(None, None))))).toString, - "olo3" → Some(Vector( + "olo3" -> Some(Vector( Some(Article("foo", Some("bar"), Some(Vector("a", "b")), Vector( None, Some(Comment("anonymous", None)), Some(Comment("anonymous", Some("commnet3")))))), None)).toString, - "olo4" → Some(Vector( + "olo4" -> Some(Vector( Some(Article("bar", None, None, Vector(None))), None, Some(Article("baz", None, None, Vector.empty)))).toString)), @@ -381,11 +381,11 @@ class FromInputSpec extends WordSpec with Matchers { olo4: optListOpt(articles: $var2) } """, - Map("data" → Map( - "olo1" → Vector(Some( """{"title":"def1","tags":["c","d"],"comments":[{"author":"c1"},null]}"""), None, Some( """{"title":"def2","text":"some text","comments":[]}""")).toString, - "olo2" → Vector(Some( """{"title":"first","comments":[null]}"""), None, Some( """{"title":"second","comments":[null,null]}""")).toString, - "olo3" → Vector(Some( """{"title":"foo","text":"bar","tags":["a","b"],"comments":[null,{"author":"anonymous"},{"author":"anonymous","text":"commnet3"}]}"""), None).toString, - "olo4" → Vector(Some( """{"title":"bar","text":null,"tags":null,"comments":[null]}"""), None, Some( """{"title":"baz","comments":[]}""")).toString)), + Map("data" -> Map( + "olo1" -> Vector(Some( """{"title":"def1","tags":["c","d"],"comments":[{"author":"c1"},null]}"""), None, Some( """{"title":"def2","text":"some text","comments":[]}""")).toString, + "olo2" -> Vector(Some( """{"title":"first","comments":[null]}"""), None, Some( """{"title":"second","comments":[null,null]}""")).toString, + "olo3" -> Vector(Some( """{"title":"foo","text":"bar","tags":["a","b"],"comments":[null,{"author":"anonymous"},{"author":"anonymous","text":"commnet3"}]}"""), None).toString, + "olo4" -> Vector(Some( """{"title":"bar","text":null,"tags":null,"comments":[null]}"""), None, Some( """{"title":"baz","comments":[]}""")).toString)), """ { "var1": { @@ -419,8 +419,8 
@@ class FromInputSpec extends WordSpec with Matchers { }) } """, - Map("data" → Map( - "nn" → Article("First!", None, None, + Map("data" -> Map( + "nn" -> Article("First!", None, None, Vector(None, Some(Comment("anonymous", Some("Hello wold"))))).toString)) ) @@ -435,20 +435,20 @@ class FromInputSpec extends WordSpec with Matchers { olo4: optListOpt(articles: $var2) } """, - Map("data" → Map( - "olo1" → Vector( + Map("data" -> Map( + "olo1" -> Vector( Some(Article("def1", None, Some(Vector("c", "d")), Vector(Some(Comment("c1", None)), None))), None, Some(Article("def2", Some("some text"), None, Vector.empty))).toString, - "olo2" → Vector( + "olo2" -> Vector( Some(Article("first", None, None, Vector(None))), None, Some(Article("second", None, None, Vector(None, None)))).toString, - "olo3" → Vector( + "olo3" -> Vector( Some(Article("foo", Some("bar"), Some(Vector("a", "b")), Vector( None, Some(Comment("anonymous", None)), Some(Comment("anonymous", Some("commnet3")))))), None).toString, - "olo4" → Vector( + "olo4" -> Vector( Some(Article("bar", None, None, Vector(None))), None, Some(Article("baz", None, None, Vector.empty))).toString)), @@ -473,4 +473,4 @@ class FromInputSpec extends WordSpec with Matchers { """.parseJson ) } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/marshalling/IonSupportSpec.scala b/src/test/scala/sangria/marshalling/IonSupportSpec.scala index 29735590..91d19044 100644 --- a/src/test/scala/sangria/marshalling/IonSupportSpec.scala +++ b/src/test/scala/sangria/marshalling/IonSupportSpec.scala @@ -27,49 +27,49 @@ class IonSupportSpec extends WordSpec with Matchers with FutureResultSupport { case object BinaryCoercionViolation extends ValueCoercionViolation("Binary data is not supported as input") def parseDate(s: String) = Try(dateFormat.parse(s)) match { - case Success(d) ⇒ Right(d) - case Failure(error) ⇒ Left(DateCoercionViolation) + case Success(d) => Right(d) + case Failure(error) => Left(DateCoercionViolation) } val 
DateType = ScalarType[Date]("Date", - coerceOutput = (d, caps) ⇒ + coerceOutput = (d, caps) => if (caps.contains(DateSupport)) d else dateFormat.format(d), coerceUserInput = { - case s: String ⇒ parseDate(s) - case _ ⇒ Left(DateCoercionViolation) + case s: String => parseDate(s) + case _ => Left(DateCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ parseDate(s) - case _ ⇒ Left(DateCoercionViolation) + case ast.StringValue(s, _, _, _, _) => parseDate(s) + case _ => Left(DateCoercionViolation) }) val BlobType = ScalarType[Array[Byte]]("Blob", - coerceOutput = (d, _) ⇒ d, - coerceUserInput = _ ⇒ Left(BinaryCoercionViolation), - coerceInput = _ ⇒ Left(BinaryCoercionViolation)) + coerceOutput = (d, _) => d, + coerceUserInput = _ => Left(BinaryCoercionViolation), + coerceInput = _ => Left(BinaryCoercionViolation)) val ClobType = ScalarType[Array[Byte]]("Clob", - coerceOutput = (d, _) ⇒ d, - coerceUserInput = _ ⇒ Left(BinaryCoercionViolation), - coerceInput = _ ⇒ Left(BinaryCoercionViolation), + coerceOutput = (d, _) => d, + coerceUserInput = _ => Left(BinaryCoercionViolation), + coerceInput = _ => Left(BinaryCoercionViolation), scalarInfo = Set(IonClobScalar)) - lazy val TestType: ObjectType[Unit, Unit] = ObjectType("Test", () ⇒ fields[Unit, Unit]( - Field("nested", OptionType(TestType), resolve = _ ⇒ ()), + lazy val TestType: ObjectType[Unit, Unit] = ObjectType("Test", () => fields[Unit, Unit]( + Field("nested", OptionType(TestType), resolve = _ => ()), Field("text", OptionType(StringType), arguments = Argument("toShow", StringType) :: Nil, - resolve = c ⇒ "foo " + c.arg[String]("toShow")), - Field("date", OptionType(DateType), resolve = _ ⇒ { + resolve = c => "foo " + c.arg[String]("toShow")), + Field("date", OptionType(DateType), resolve = _ => { val cal = Calendar.getInstance(TimeZone.getTimeZone("CET")) cal.set(2015, 5, 11, 18, 23, 14) cal.set(Calendar.MILLISECOND, 123) cal.getTime }), Field("blob", OptionType(BlobType), - resolve = _ ⇒ 
"foo bar".getBytes("UTF-8")), + resolve = _ => "foo bar".getBytes("UTF-8")), Field("clob", OptionType(ClobType), - resolve = _ ⇒ "foo bar baz".getBytes("UTF-8")))) + resolve = _ => "foo bar baz".getBytes("UTF-8")))) val schema = Schema(TestType) diff --git a/src/test/scala/sangria/marshalling/MarshallingUtilSpec.scala b/src/test/scala/sangria/marshalling/MarshallingUtilSpec.scala index 1297e080..955fe852 100644 --- a/src/test/scala/sangria/marshalling/MarshallingUtilSpec.scala +++ b/src/test/scala/sangria/marshalling/MarshallingUtilSpec.scala @@ -29,17 +29,17 @@ class MarshallingUtilSpec extends WordSpec with Matchers { out should be ( JsObject( - "id" → JsNumber(1), - "name" → JsString("door"), - "items" → JsArray(Vector( + "id" -> JsNumber(1), + "name" -> JsString("door"), + "items" -> JsArray(Vector( JsObject( - "state" → JsString("Open"), - "durability" → JsNumber(BigDecimal("0.1465645654675762354763254763343243242"))), + "state" -> JsString("Open"), + "durability" -> JsNumber(BigDecimal("0.1465645654675762354763254763343243242"))), JsNull, JsObject( - "state" → JsString("Open"), - "durability" → JsNumber(BigDecimal("0.5")), - "foo" → JsNull))))) + "state" -> JsString("Open"), + "durability" -> JsNumber(BigDecimal("0.5")), + "foo" -> JsNull))))) } "convert query AST to scala map" in { @@ -60,17 +60,17 @@ class MarshallingUtilSpec extends WordSpec with Matchers { out should be ( Map( - "id" → 1, - "name" → "door", - "items" → Vector( + "id" -> 1, + "name" -> "door", + "items" -> Vector( Map( - "state" → "Open", - "durability" → BigDecimal("0.1465645654675762354763254763343243242")), + "state" -> "Open", + "durability" -> BigDecimal("0.1465645654675762354763254763343243242")), null, Map( - "state" → "Open", - "durability" → BigDecimal("0.5"), - "foo" → null)))) + "state" -> "Open", + "durability" -> BigDecimal("0.5"), + "foo" -> null)))) } } diff --git a/src/test/scala/sangria/parser/QueryParserSpec.scala b/src/test/scala/sangria/parser/QueryParserSpec.scala 
index f42243ba..8fd93d39 100644 --- a/src/test/scala/sangria/parser/QueryParserSpec.scala +++ b/src/test/scala/sangria/parser/QueryParserSpec.scala @@ -1383,23 +1383,23 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { def findAst[T <: AstNode : ClassTag](ast: AstNode): Option[T] = ast match { - case node if implicitly[ClassTag[T]].runtimeClass.isAssignableFrom(node.getClass) ⇒ Some(node.asInstanceOf[T]) - case Document(defs, _, _, _) ⇒ defs map findAst[T] find (_.isDefined) flatten - case OperationDefinition(_, _, vars, _, _, _, _, _) ⇒ vars map findAst[T] find (_.isDefined) flatten - case VariableDefinition(_, _, default, _, _, _) ⇒ default flatMap findAst[T] - case _ ⇒ None + case node if implicitly[ClassTag[T]].runtimeClass.isAssignableFrom(node.getClass) => Some(node.asInstanceOf[T]) + case Document(defs, _, _, _) => defs map findAst[T] find (_.isDefined) flatten + case OperationDefinition(_, _, vars, _, _, _, _, _) => vars map findAst[T] find (_.isDefined) flatten + case VariableDefinition(_, _, default, _, _, _) => default flatMap findAst[T] + case _ => None } "parse int values" in { val expectedTable = Vector( - "4" → BigInt("4"), - "-4" → BigInt("-4"), - "9" → BigInt("9"), - "0" → BigInt("0"), - "784236564875237645762347623147574756321" → BigInt("784236564875237645762347623147574756321") + "4" -> BigInt("4"), + "-4" -> BigInt("-4"), + "9" -> BigInt("9"), + "0" -> BigInt("0"), + "784236564875237645762347623147574756321" -> BigInt("784236564875237645762347623147574756321") ) - expectedTable foreach { expected ⇒ + expectedTable foreach { expected => findAst[BigIntValue](parseQuery(s"query Foo($$x: Complex = ${expected._1}) { field }").get) should be ( Some(BigIntValue(expected._2, Vector.empty, Some(AstLocation(24, 1, 25))))) } @@ -1407,18 +1407,18 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { "parse float values" in { val expectedTable = Vector( - "4.123" → BigDecimal("4.123"), - "-4.123" → 
BigDecimal("-4.123"), - "0.123" → BigDecimal("0.123"), - "123E4" → BigDecimal("123E4"), - "123e-4" → BigDecimal("123e-4"), - "-1.123e4" → BigDecimal("-1.123e4"), - "-1.123E4" → BigDecimal("-1.123E4"), - "-1.123e+4" → BigDecimal("-1.123e+4"), - "-1.123e4567" → BigDecimal("-1.123e4567") + "4.123" -> BigDecimal("4.123"), + "-4.123" -> BigDecimal("-4.123"), + "0.123" -> BigDecimal("0.123"), + "123E4" -> BigDecimal("123E4"), + "123e-4" -> BigDecimal("123e-4"), + "-1.123e4" -> BigDecimal("-1.123e4"), + "-1.123E4" -> BigDecimal("-1.123E4"), + "-1.123e+4" -> BigDecimal("-1.123e+4"), + "-1.123e4567" -> BigDecimal("-1.123e4567") ) - expectedTable foreach { expected ⇒ + expectedTable foreach { expected => withClue(s"Parsing ${expected._1}.") { findAst[BigDecimalValue](parseQuery(s"query Foo($$x: Complex = ${expected._1}) { field }").get) should be( Some(BigDecimalValue(expected._2, Vector.empty, Some(AstLocation(24, 1, 25))))) @@ -1443,17 +1443,17 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { "parse input values independently" in { val expectedTable = Vector( - "null" → NullValue(Vector.empty, Some(AstLocation(0, 1, 1))), - "1.234" → BigDecimalValue(BigDecimal("1.234"), Vector.empty, Some(AstLocation(0, 1, 1))), - "HELLO_WORLD" → EnumValue("HELLO_WORLD", Vector.empty, Some(AstLocation(0, 1, 1))), - "[1, 2 \"test\"]" → ListValue( + "null" -> NullValue(Vector.empty, Some(AstLocation(0, 1, 1))), + "1.234" -> BigDecimalValue(BigDecimal("1.234"), Vector.empty, Some(AstLocation(0, 1, 1))), + "HELLO_WORLD" -> EnumValue("HELLO_WORLD", Vector.empty, Some(AstLocation(0, 1, 1))), + "[1, 2 \"test\"]" -> ListValue( Vector( BigIntValue(1, Vector.empty, Some(AstLocation(1, 1, 2))), BigIntValue(2, Vector.empty, Some(AstLocation(4, 1, 5))), StringValue("test", false, None, Vector.empty, Some(AstLocation(6, 1, 7)))), Vector.empty, Some(AstLocation(0, 1, 1))), - "{a: 1, b: \"foo\" c: {nest: true, oops: null, e: FOO_BAR}}" → + "{a: 1, b: \"foo\" c: {nest: true, 
oops: null, e: FOO_BAR}}" -> ObjectValue( Vector( ObjectField("a", BigIntValue(1, Vector.empty, Some(AstLocation(4, 1, 5))), Vector.empty, Some(AstLocation(1, 1, 2))), @@ -1477,7 +1477,7 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { # This is a test comment! b: "foo" } - """ → + """ -> ObjectValue( Vector( ObjectField("a", BigIntValue(1, Vector.empty, Some(AstLocation(26, 3, 15))), Vector.empty, Some(AstLocation(23, 3, 12))), @@ -1489,7 +1489,7 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { ) - expectedTable foreach { expected ⇒ + expectedTable foreach { expected => withClue(s"Parsing ${expected._1}.") { QueryParser.parseInput(stripCarriageReturns(expected._1)) should equal (Success(expected._2)) } @@ -2057,4 +2057,4 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { (parseQuery(query1) == parseQuery(query2)) should be (false) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/renderer/QueryRendererSpec.scala b/src/test/scala/sangria/renderer/QueryRendererSpec.scala index 0bdd2498..2055f4e4 100644 --- a/src/test/scala/sangria/renderer/QueryRendererSpec.scala +++ b/src/test/scala/sangria/renderer/QueryRendererSpec.scala @@ -566,7 +566,7 @@ class QueryRendererSpec extends WordSpec with Matchers with StringMatchers { val Success(origAst) = QueryParser.parse(FileUtil loadQuery "block-string.graphql") val ast = origAst.visit(AstVisitor { - case s: StringValue ⇒ VisitorCommand.Transform(s.copy(block = false, blockRawValue = None)) + case s: StringValue => VisitorCommand.Transform(s.copy(block = false, blockRawValue = None)) }) val prettyRendered = QueryRenderer.render(ast, QueryRenderer.Pretty) @@ -600,7 +600,7 @@ class QueryRendererSpec extends WordSpec with Matchers with StringMatchers { val Success(ast) = QueryParser.parse(FileUtil loadQuery "block-string.graphql") def withoutRaw(withRaw: Document, block: Boolean = true) = AstVisitor.visit(withRaw, 
AstVisitor { - case s: StringValue ⇒ VisitorCommand.Transform(s.copy(block = if (block) s.block else false, blockRawValue = None)) + case s: StringValue => VisitorCommand.Transform(s.copy(block = if (block) s.block else false, blockRawValue = None)) }) val prettyRendered = QueryRenderer.render(ast, QueryRenderer.Pretty) @@ -678,7 +678,7 @@ class QueryRendererSpec extends WordSpec with Matchers with StringMatchers { val Success(ast) = QueryParser.parse(FileUtil loadQuery "schema-kitchen-sink.graphql") def noBlock(a: AstNode) = AstVisitor.visit(a, AstVisitor { - case v: StringValue ⇒ VisitorCommand.Transform(v.copy(block = false, blockRawValue = None)) + case v: StringValue => VisitorCommand.Transform(v.copy(block = false, blockRawValue = None)) }) val prettyRendered = QueryRenderer.render(ast, QueryRenderer.Pretty) @@ -968,4 +968,4 @@ class QueryRendererSpec extends WordSpec with Matchers with StringMatchers { } } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/renderer/SchemaRenderSpec.scala b/src/test/scala/sangria/renderer/SchemaRenderSpec.scala index 73ed2c74..3604bd88 100644 --- a/src/test/scala/sangria/renderer/SchemaRenderSpec.scala +++ b/src/test/scala/sangria/renderer/SchemaRenderSpec.scala @@ -19,9 +19,9 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w def renderForTest[T: InputUnmarshaller](res: T, schema: Schema[_, _]) = "\n" + SchemaRenderer.renderSchema(res)+ "\n" def renderForTest(schema: Schema[Unit, Unit]) = "\n" + SchemaRenderer.renderSchema(schema) + "\n" - def renderSingleFieldSchema(tpe: OutputType[_], args: List[Argument[_]] = Nil)(implicit render: Schema[Unit, Unit] ⇒ String) = { + def renderSingleFieldSchema(tpe: OutputType[_], args: List[Argument[_]] = Nil)(implicit render: Schema[Unit, Unit] => String) = { val root = ObjectType("Root", fields[Unit, Unit]( - Field("singleField", tpe.asInstanceOf[OutputType[Unit]], arguments = args, resolve = _ ⇒ ()) + Field("singleField", 
tpe.asInstanceOf[OutputType[Unit]], arguments = args, resolve = _ => ()) )) val schema = Schema(root) @@ -30,7 +30,7 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w val quotes = "\"\"\"" - def `default schema renderer`(implicit render: Schema[Unit, Unit] ⇒ String): Unit = { + def `default schema renderer`(implicit render: Schema[Unit, Unit] => String): Unit = { "Prints String Field" in { renderSingleFieldSchema(OptionType(StringType)) should equal (""" |schema { @@ -93,11 +93,11 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w "Print Object Field" in { val foo = ObjectType("Foo", fields[Unit, Unit]( - Field("str", OptionType(StringType), resolve = _ ⇒ "foo") + Field("str", OptionType(StringType), resolve = _ => "foo") )) val root = ObjectType("Root", fields[Unit, Unit]( - Field("foo", OptionType(foo), resolve = _ ⇒ ()) + Field("foo", OptionType(foo), resolve = _ => ()) )) val schema = Schema(root) @@ -236,14 +236,14 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w "Print Interface" in { val foo = InterfaceType("Foo", "My\ndescription", fields[Unit, Unit]( - Field("str", OptionType(StringType), description = Some("field\ndescription"), resolve = _ ⇒ "foo") + Field("str", OptionType(StringType), description = Some("field\ndescription"), resolve = _ => "foo") )) val bar = ObjectType("Bar", interfaces[Unit, Unit](foo), fields[Unit, Unit]( - Field("str", OptionType(StringType), resolve = _ ⇒ "foo"))) + Field("str", OptionType(StringType), resolve = _ => "foo"))) val root = ObjectType("Root", fields[Unit, Unit]( - Field("bar", OptionType(bar), resolve = _ ⇒ ()) + Field("bar", OptionType(bar), resolve = _ => ()) )) val schema = Schema(root) @@ -277,17 +277,17 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w "Print Multiple Interface" in { val foo = InterfaceType("Foo", fields[Unit, Unit]( - Field("str", OptionType(StringType), 
resolve = _ ⇒ "foo") + Field("str", OptionType(StringType), resolve = _ => "foo") )) val baz = InterfaceType("Baaz", fields[Unit, Unit]( - Field("int", OptionType(IntType), resolve = _ ⇒ 1) + Field("int", OptionType(IntType), resolve = _ => 1) )) val bar = ObjectType("Bar", interfaces[Unit, Unit](foo, baz), Nil) val root = ObjectType("Root", fields[Unit, Unit]( - Field("bar", OptionType(bar), resolve = _ ⇒ ()) + Field("bar", OptionType(bar), resolve = _ => ()) )) val schema = Schema(root) @@ -318,17 +318,17 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w "Print Multiple Interface (with interface hierarchy)" in { val foo = InterfaceType("Foo", fields[Unit, Unit]( - Field("str", OptionType(StringType), resolve = _ ⇒ "foo") + Field("str", OptionType(StringType), resolve = _ => "foo") )) val baz = InterfaceType("Baaz", fields[Unit, Unit]( - Field("int", OptionType(IntType), resolve = _ ⇒ 1) + Field("int", OptionType(IntType), resolve = _ => 1) ), interfaces[Unit, Unit](foo)) val bar = ObjectType("Bar", interfaces[Unit, Unit](baz), Nil) val root = ObjectType("Root", fields[Unit, Unit]( - Field("bar", OptionType(bar), resolve = _ ⇒ ()) + Field("bar", OptionType(bar), resolve = _ => ()) )) val schema = Schema(root) @@ -360,19 +360,19 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w "Print Unions" in { val foo = ObjectType("Foo", fields[Unit, Unit]( - Field("bool", OptionType(BooleanType), resolve = _ ⇒ true) + Field("bool", OptionType(BooleanType), resolve = _ => true) )) val bar = ObjectType("Bar", fields[Unit, Unit]( - Field("str", OptionType(StringType), resolve = _ ⇒ "f") + Field("str", OptionType(StringType), resolve = _ => "f") )) val singleUnion = UnionType("SingleUnion", types = foo :: Nil) val multipleUnion = UnionType("MultipleUnion", types = foo :: bar :: Nil) val root = ObjectType("Root", fields[Unit, Unit]( - Field("single", OptionType(singleUnion), resolve = _ ⇒ ()), - Field("multiple", 
OptionType(multipleUnion), resolve = _ ⇒ ()) + Field("single", OptionType(singleUnion), resolve = _ => ()), + Field("multiple", OptionType(multipleUnion), resolve = _ => ()) )) val schema = Schema(root) @@ -410,12 +410,12 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w val inputType = InputObjectType("InputType", "My\ndescription", List( InputField("int", OptionInputType(IntType), description = "My\nfield\ndescription"), InputField("article", OptionInputType(articleType), description = "has a default!", - defaultValue = scalaInput(Map("title" → "Hello", "auhor" → "Bob", "comments" → List("first!", "looks good!")))))) + defaultValue = scalaInput(Map("title" -> "Hello", "auhor" -> "Bob", "comments" -> List("first!", "looks good!")))))) val root = ObjectType("Root", fields[Unit, Unit]( Field("str", OptionType(StringType), arguments = Argument("argOne", OptionInputType(inputType)) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val schema = Schema(root) @@ -464,18 +464,18 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w description = Some("My\ndescription"), coerceOutput = valueOutput, coerceUserInput = { - case i: Int if i % 2 != 0 ⇒ Right(i) - case i: BigInt if i.isValidInt && i % 2 != BigInt(0) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + case i: Int if i % 2 != 0 => Right(i) + case i: BigInt if i.isValidInt && i % 2 != BigInt(0) => Right(i.intValue) + case _ => Left(IntCoercionViolation) }, coerceInput = { - case ast.IntValue(i, _, _) if i % 2 != 0 ⇒ Right(i) - case ast.BigIntValue(i, _, _) if i.isValidInt && i % 2 != BigInt(0) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + case ast.IntValue(i, _, _) if i % 2 != 0 => Right(i) + case ast.BigIntValue(i, _, _) if i.isValidInt && i % 2 != BigInt(0) => Right(i.intValue) + case _ => Left(IntCoercionViolation) }) val root = ObjectType("Root", fields[Unit, Unit]( - Field("odd", OptionType(odd), resolve = _ ⇒ None) + 
Field("odd", OptionType(odd), resolve = _ => None) )) val schema = Schema(root) @@ -506,7 +506,7 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w EnumValue("BLUE", value = 3, deprecationReason = Some(DefaultDeprecationReason)))) val root = ObjectType("Root", fields[Unit, Unit]( - Field("rgb", OptionType(rgb), resolve = _ ⇒ None) + Field("rgb", OptionType(rgb), resolve = _ => None) )) val schema = Schema(root) @@ -545,10 +545,10 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w Argument("last", OptionInputType(IntType), "Another descr") :: Nil, locations = Set(DirectiveLocation.FieldDefinition, DirectiveLocation.InputFieldDefinition), - shouldInclude = _ ⇒ true) + shouldInclude = _ => true) val root = ObjectType("Root", fields[Unit, Unit]( - Field("foo", OptionType(StringType), resolve = _ ⇒ None))) + Field("foo", OptionType(StringType), resolve = _ => None))) val schema = Schema(root, directives = BuiltinDirectives :+ myDirective) @@ -583,11 +583,11 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w } "Introspection-based Schema Renderer" should { - behave like `default schema renderer` (schema ⇒ renderForTest(Executor.execute(schema, introspectionQuery).await, schema)) + behave like `default schema renderer` (schema => renderForTest(Executor.execute(schema, introspectionQuery).await, schema)) "throw an exception if introspection results contain some errors" in { val root = ObjectType("Root", fields[Unit, Unit]( - Field("singleField", StringType, resolve = _ ⇒ "") + Field("singleField", StringType, resolve = _ => "") )) val schema = Schema(root) @@ -598,12 +598,12 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w } "Schema-based Schema Renderer" should { - behave like `default schema renderer` (schema ⇒ renderForTest(schema)) + behave like `default schema renderer` (schema => renderForTest(schema)) } "Introspection Schema Renderer" 
should { "Print Introspection Schema" in { - val schema = Schema(ObjectType("Root", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1)))) + val schema = Schema(ObjectType("Root", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1)))) val rendered = SchemaRenderer.renderSchema(Executor.execute(schema, introspectionQuery).await, SchemaFilter.introspection) ("\n" + rendered + "\n") should equal (s""" @@ -775,7 +775,7 @@ class SchemaRenderSpec extends WordSpec with Matchers with FutureResultSupport w } "Print schema with legacy comment descriptions" in { - val schema = Schema(ObjectType("Root", fields[Unit, Unit](Field("foo", IntType, resolve = _ ⇒ 1)))) + val schema = Schema(ObjectType("Root", fields[Unit, Unit](Field("foo", IntType, resolve = _ => 1)))) val rendered = schema.renderPretty(SchemaFilter.introspection.withLegacyCommentDescriptions) ("\n" + rendered + "\n") should equal (""" diff --git a/src/test/scala/sangria/schema/ArgsSpec.scala b/src/test/scala/sangria/schema/ArgsSpec.scala index ac995af0..e1cbc1a5 100644 --- a/src/test/scala/sangria/schema/ArgsSpec.scala +++ b/src/test/scala/sangria/schema/ArgsSpec.scala @@ -59,7 +59,7 @@ class ArgsSpec extends WordSpec with Matchers { } "build with defined arguments" in { - val expectedMap = Map(NonDefaultArgumentName → 9001) + val expectedMap = Map(NonDefaultArgumentName -> 9001) val args = Args(List(nonDefaultArgument), expectedMap) args.raw should be (expectedMap) args.argsWithDefault should be (Set.empty) @@ -75,8 +75,8 @@ class ArgsSpec extends WordSpec with Matchers { } "build with optional argument and defined input" in { - val args = Args(List(optionalArgument), Map(OptionalArgumentName → 9001)) - args.raw should be (Map(OptionalArgumentName → Some(9001))) + val args = Args(List(optionalArgument), Map(OptionalArgumentName -> 9001)) + args.raw should be (Map(OptionalArgumentName -> Some(9001))) args.argsWithDefault should be (Set.empty) args.optionalArgs should be (Set(OptionalArgumentName)) 
args.undefinedArgs should be (Set.empty) @@ -98,7 +98,7 @@ class ArgsSpec extends WordSpec with Matchers { "build with default values" in { val args = Args(List(defaultArgument)) - args.raw should be (Map(DefaultArgumentName → Some(10))) + args.raw should be (Map(DefaultArgumentName -> Some(10))) args.argsWithDefault should be (Set(DefaultArgumentName)) args.optionalArgs should be (Set(DefaultArgumentName)) args.undefinedArgs should be (Set(DefaultArgumentName)) @@ -108,8 +108,8 @@ class ArgsSpec extends WordSpec with Matchers { } "build with overriden default values" in { - val args = Args(List(defaultArgument), Map(DefaultArgumentName → 9001)) - args.raw should be (Map(DefaultArgumentName → Some(9001))) + val args = Args(List(defaultArgument), Map(DefaultArgumentName -> 9001)) + args.raw should be (Map(DefaultArgumentName -> Some(9001))) args.argsWithDefault should be (Set(DefaultArgumentName)) args.optionalArgs should be (Set(DefaultArgumentName)) args.undefinedArgs should be (Set.empty) @@ -121,9 +121,9 @@ class ArgsSpec extends WordSpec with Matchers { "buildArgs with spray-json" should { "build with defined argument" in { - val json = JsObject(NonDefaultArgumentName → JsNumber(10)) + val json = JsObject(NonDefaultArgumentName -> JsNumber(10)) val args = Args(List(nonDefaultArgument), json) - args.raw should be (Map(NonDefaultArgumentName → 10)) + args.raw should be (Map(NonDefaultArgumentName -> 10)) args.argsWithDefault should be (Set.empty) args.optionalArgs should be (Set.empty) args.undefinedArgs should be (Set.empty) @@ -137,9 +137,9 @@ class ArgsSpec extends WordSpec with Matchers { } "build with optional argument and defined input" in { - val json = JsObject(OptionalArgumentName → JsNumber(9001)) + val json = JsObject(OptionalArgumentName -> JsNumber(9001)) val args = Args(List(optionalArgument), json) - args.raw should be (Map(OptionalArgumentName → Some(9001))) + args.raw should be (Map(OptionalArgumentName -> Some(9001))) args.argsWithDefault should 
be (Set.empty) args.optionalArgs should be (Set(OptionalArgumentName)) args.undefinedArgs should be (Set.empty) @@ -160,9 +160,9 @@ class ArgsSpec extends WordSpec with Matchers { } "build with overriden default values" in { - val json = JsObject(DefaultArgumentName → JsNumber(9001)) + val json = JsObject(DefaultArgumentName -> JsNumber(9001)) val args = Args(List(defaultArgument), json) - args.raw should be (Map(DefaultArgumentName → Some(9001))) + args.raw should be (Map(DefaultArgumentName -> Some(9001))) args.argsWithDefault should be (Set(DefaultArgumentName)) args.optionalArgs should be (Set(DefaultArgumentName)) args.undefinedArgs should be (Set.empty) @@ -174,10 +174,10 @@ class ArgsSpec extends WordSpec with Matchers { "buildArgs with nested json objects" should { "build with nested arguments" in { - val json = JsObject(NestedParentArgumentName → JsObject( - NonDefaultArgumentName → JsNumber(1), - DefaultArgumentName → JsNumber(2), - OptionalArgumentName → JsNumber(3))) + val json = JsObject(NestedParentArgumentName -> JsObject( + NonDefaultArgumentName -> JsNumber(1), + DefaultArgumentName -> JsNumber(2), + OptionalArgumentName -> JsNumber(3))) val args = Args(List(nestedParentArgument), json) val fields = args.arg(nestedParentArgument).asJsObject.fields @@ -188,17 +188,17 @@ class ArgsSpec extends WordSpec with Matchers { } "not build without required arguments" in { - val json = JsObject(NestedParentArgumentName → JsObject( - DefaultArgumentName → JsNumber(2), - OptionalArgumentName → JsNumber(3))) + val json = JsObject(NestedParentArgumentName -> JsObject( + DefaultArgumentName -> JsNumber(2), + OptionalArgumentName -> JsNumber(3))) an [AttributeCoercionError] should be thrownBy Args(List(nestedParentArgument), json) } "build without default arguments" in { - val json = JsObject(NestedParentArgumentName → JsObject( - NonDefaultArgumentName → JsNumber(1), - OptionalArgumentName → JsNumber(3))) + val json = JsObject(NestedParentArgumentName -> JsObject( 
+ NonDefaultArgumentName -> JsNumber(1), + OptionalArgumentName -> JsNumber(3))) val args = Args(List(nestedParentArgument), json) val fields = args.arg(nestedParentArgument).asJsObject.fields @@ -209,9 +209,9 @@ class ArgsSpec extends WordSpec with Matchers { } "build without optional arguments" in { - val json = JsObject(NestedParentArgumentName → JsObject( - NonDefaultArgumentName → JsNumber(1), - DefaultArgumentName → JsNumber(2))) + val json = JsObject(NestedParentArgumentName -> JsObject( + NonDefaultArgumentName -> JsNumber(1), + DefaultArgumentName -> JsNumber(2))) val args = Args(List(nestedParentArgument), json) val fields = args.arg(nestedParentArgument).asJsObject.fields @@ -224,10 +224,10 @@ class ArgsSpec extends WordSpec with Matchers { "buildArgs with nested map objects" should { "build with nested arguments" in { - val inputMap = NestedParentArgumentName → Map( - NonDefaultArgumentName → 1, - DefaultArgumentName → 2, - OptionalArgumentName → 3) + val inputMap = NestedParentArgumentName -> Map( + NonDefaultArgumentName -> 1, + DefaultArgumentName -> 2, + OptionalArgumentName -> 3) val args = Args(List(nestedParentArgument), inputMap) val fields = args.arg(nestedParentArgument).asJsObject.fields @@ -238,17 +238,17 @@ class ArgsSpec extends WordSpec with Matchers { } "not build without required arguments" in { - val inputMap = NestedParentArgumentName → Map( - DefaultArgumentName → 2, - OptionalArgumentName → 3) + val inputMap = NestedParentArgumentName -> Map( + DefaultArgumentName -> 2, + OptionalArgumentName -> 3) an [AttributeCoercionError] should be thrownBy Args(List(nestedParentArgument), inputMap) } "build without default arguments" in { - val inputMap = NestedParentArgumentName → Map( - NonDefaultArgumentName → 1, - OptionalArgumentName → 3) + val inputMap = NestedParentArgumentName -> Map( + NonDefaultArgumentName -> 1, + OptionalArgumentName -> 3) val args = Args(List(nestedParentArgument), inputMap) val fields = 
args.arg(nestedParentArgument).asJsObject.fields @@ -259,9 +259,9 @@ class ArgsSpec extends WordSpec with Matchers { } "build without optional arguments" in { - val inputMap = NestedParentArgumentName → Map( - NonDefaultArgumentName → 1, - DefaultArgumentName → 2) + val inputMap = NestedParentArgumentName -> Map( + NonDefaultArgumentName -> 1, + DefaultArgumentName -> 2) val args = Args(List(nestedParentArgument), inputMap) val fields = args.arg(nestedParentArgument).asJsObject.fields diff --git a/src/test/scala/sangria/schema/AstSchemaMaterializerSpec.scala b/src/test/scala/sangria/schema/AstSchemaMaterializerSpec.scala index 6ff2739e..ab9e29d4 100644 --- a/src/test/scala/sangria/schema/AstSchemaMaterializerSpec.scala +++ b/src/test/scala/sangria/schema/AstSchemaMaterializerSpec.scala @@ -733,11 +733,11 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult def loadComments: List[JsValue] = List(JsObject( - "text" → JsString("First!"), - "author" → JsObject( - "name" → JsString("Jane"), - "lastComment" → JsObject( - "text" → JsString("Boring..."))))) + "text" -> JsString("First!"), + "author" -> JsObject( + "name" -> JsString("Jane"), + "lastComment" -> JsObject( + "text" -> JsString("Boring..."))))) } val ArticleType = deriveObjectType[Repo, Article]() @@ -747,7 +747,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult val QueryType = ObjectType("Query", fields[Repo, Unit]( Field("article", OptionType(ArticleType), arguments = IdArg :: Nil, - resolve = c ⇒ c.ctx.loadArticle(c arg IdArg)))) + resolve = c => c.ctx.loadArticle(c arg IdArg)))) val staticSchema = Schema(QueryType) @@ -776,16 +776,16 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult definition: ast.FieldDefinition, mat: AstSchemaMaterializer[Repo]) = if (definition.directives.exists(_.name == "loadComments")) - c ⇒ c.ctx.loadComments + c => c.ctx.loadComments else - c ⇒ resolveJson(c.field.name, 
c.field.fieldType, c.value.asInstanceOf[JsValue]) + c => resolveJson(c.field.name, c.field.fieldType, c.value.asInstanceOf[JsValue]) def resolveJson(name: String, tpe: OutputType[_], json: JsValue): Any = tpe match { - case OptionType(ofType) ⇒ resolveJson(name, ofType, json) - case ListType(ofType) ⇒ json.asInstanceOf[JsArray].elements.map(resolveJson(name, ofType, _)) - case StringType ⇒ json.asJsObject.fields(name).asInstanceOf[JsString].value - case _ if json.asJsObject.fields(name).isInstanceOf[JsObject] ⇒ json.asJsObject.fields(name) - case t ⇒ throw new IllegalStateException(s"Type ${SchemaRenderer.renderTypeName(t)} is not supported") + case OptionType(ofType) => resolveJson(name, ofType, json) + case ListType(ofType) => json.asInstanceOf[JsArray].elements.map(resolveJson(name, ofType, _)) + case StringType => json.asJsObject.fields(name).asInstanceOf[JsString].value + case _ if json.asJsObject.fields(name).isInstanceOf[JsObject] => json.asJsObject.fields(name) + case t => throw new IllegalStateException(s"Type ${SchemaRenderer.renderTypeName(t)} is not supported") } } @@ -1229,8 +1229,8 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult } """ - val ReturnCat = Directive("returnCat", locations = Set(DirectiveLocation.FieldDefinition), shouldInclude = _ ⇒ true) - val ReturnDog = Directive("returnDog", locations = Set(DirectiveLocation.FieldDefinition), shouldInclude = _ ⇒ true) + val ReturnCat = Directive("returnCat", locations = Set(DirectiveLocation.FieldDefinition), shouldInclude = _ => true) + val ReturnDog = Directive("returnDog", locations = Set(DirectiveLocation.FieldDefinition), shouldInclude = _ => true) val customBuilder = new DefaultAstSchemaBuilder[Unit] { override def resolveField( @@ -1240,41 +1240,41 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult definition: ast.FieldDefinition, mat: AstSchemaMaterializer[Unit]) = if (definition.directives.exists(_.name == ReturnCat.name)) - 
_ ⇒ Map("type" → "Cat", "name" → "foo", "age" → Some(10)) + _ => Map("type" -> "Cat", "name" -> "foo", "age" -> Some(10)) else if (definition.directives.exists(_.name == ReturnDog.name)) - _ ⇒ Map("type" → "Dog", "name" → "bar", "nickname" → Some("baz")) + _ => Map("type" -> "Dog", "name" -> "bar", "nickname" -> Some("baz")) else if (definition.name == "add") - ctx ⇒ ctx.arg[Int]("a") + ctx.arg[Int]("b") + ctx => ctx.arg[Int]("a") + ctx.arg[Int]("b") else _.value.asInstanceOf[Map[String, Any]](definition.name) override def objectTypeInstanceCheck(origin: MatOrigin, definition: ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]) = - Some((value, _) ⇒ value.asInstanceOf[Map[String, Any]]("type") == definition.name) + Some((value, _) => value.asInstanceOf[Map[String, Any]]("type") == definition.name) override def scalarCoerceUserInput(definition: ast.ScalarTypeDefinition) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case i: Int ⇒ Right(i) - case i: BigInt ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => definition.name match { + case "Custom" => value match { + case i: Int => Right(i) + case i: BigInt => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceInput(definition: ast.ScalarTypeDefinition) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => definition.name match { + case "Custom" => value match { + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => 
Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceOutput(definition: ast.ScalarTypeDefinition) = - (coerced, _) ⇒ definition.name match { - case "Custom" ⇒ ast.IntValue(coerced.asInstanceOf[Int]) - case _ ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + (coerced, _) => definition.name match { + case "Custom" => ast.IntValue(coerced.asInstanceOf[Int]) + case _ => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException } } @@ -1317,25 +1317,25 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult } } """, - Map("data" → + Map("data" -> Map( - "add1" → 133, - "add2" → 153, - "add3" → 912, - "a1" → Map( - "name" → "bar", - "__typename" → "Dog"), - "a2" → Map( - "name" → "foo", - "__typename" → "Cat"), - "a3" → Map( - "__typename" → "Dog", - "name" → "bar", - "nickname" → "baz"), - "a4" → Map( - "__typename" → "Cat", - "name" → "foo", - "age" → 10))), + "add1" -> 133, + "add2" -> 153, + "add3" -> 912, + "a1" -> Map( + "name" -> "bar", + "__typename" -> "Dog"), + "a2" -> Map( + "name" -> "foo", + "__typename" -> "Cat"), + "a3" -> Map( + "__typename" -> "Dog", + "name" -> "bar", + "nickname" -> "baz"), + "a4" -> Map( + "__typename" -> "Cat", + "name" -> "foo", + "age" -> 10))), """{"v": 456}""".parseJson ) } @@ -1366,7 +1366,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Unit, _]], extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], definition: ast.FieldDefinition, - mat: AstSchemaMaterializer[Unit]) = c ⇒ "test " + c.arg[String]("name") + mat: AstSchemaMaterializer[Unit]) = c => "test " + c.arg[String]("name") } val schema = Schema.buildFromAst(schemaAst, customBuilder) @@ -1377,7 +1377,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult addUser(name: "Bob") } """, - Map("data" → Map("addUser" → "test Bob"))) + Map("data" -> 
Map("addUser" -> "test Bob"))) schema.astDirectives.exists(_.name == "fromExt") should be (true) } @@ -1404,7 +1404,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Unit, _]], extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], definition: ast.FieldDefinition, - mat: AstSchemaMaterializer[Unit]) = c ⇒ "test " + c.arg[String]("name") + mat: AstSchemaMaterializer[Unit]) = c => "test " + c.arg[String]("name") } val schema = Schema.buildFromAst(schemaAst, customBuilder) @@ -1415,7 +1415,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult addUser(name: "Bob") } """, - Map("data" → Map("addUser" → "test Bob"))) + Map("data" -> Map("addUser" -> "test Bob"))) schema.astDirectives.exists(_.name == "fromExt") should be (true) } @@ -1434,7 +1434,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult val existingSchema = Schema(ObjectType("Query", fields[Unit, Unit]( - Field("test", StringType, resolve = _ ⇒ "test")))) + Field("test", StringType, resolve = _ => "test")))) val customBuilder = new DefaultAstSchemaBuilder[Unit] { override def resolveField( @@ -1442,7 +1442,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult typeDefinition: Either[ast.TypeDefinition, ObjectLikeType[Unit, _]], extensions: Vector[ast.ObjectLikeTypeExtensionDefinition], definition: ast.FieldDefinition, - mat: AstSchemaMaterializer[Unit]) = c ⇒ "test " + c.arg[String]("name") + mat: AstSchemaMaterializer[Unit]) = c => "test " + c.arg[String]("name") } val schema = existingSchema.extend(schemaAst, customBuilder) @@ -1453,7 +1453,7 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult addUser(name: "Bob") } """, - Map("data" → Map("addUser" → "test Bob"))) + Map("data" -> Map("addUser" -> "test Bob"))) schema.astDirectives.exists(_.name == "fromExt") should be (true) } @@ 
-1501,13 +1501,13 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult definition: ast.FieldDefinition, mat: AstSchemaMaterializer[Unit]) = if (definition.name == "foo") - _ ⇒ Some(()) + _ => Some(()) else if (definition.name endsWith "None") - _ ⇒ Value(None) + _ => Value(None) else if (definition.name endsWith "Null") - _ ⇒ Value(null) + _ => Value(null) else - _ ⇒ Value(None) + _ => Value(None) } val schema = Schema.buildFromAst(schemaAst, customBuilder) @@ -1529,18 +1529,18 @@ class AstSchemaMaterializerSpec extends WordSpec with Matchers with FutureResult } } """, - Map("foo" → null), + Map("foo" -> null), List( - """Cannot return null for non-nullable type""" → List(Pos(4, 17)), - """Cannot return null for non-nullable type""" → List(Pos(5, 17)), - """Cannot return null for non-nullable type""" → List(Pos(6, 17)), - """Cannot return null for non-nullable type""" → List(Pos(7, 17)), - """Cannot return null for non-nullable type""" → List(Pos(8, 17)), - """Cannot return null for non-nullable type""" → List(Pos(9, 17)), - """Cannot return null for non-nullable type""" → List(Pos(10, 17)), - """Cannot return null for non-nullable type""" → List(Pos(11, 17)), - """Cannot return null for non-nullable type""" → List(Pos(12, 17)), - """Cannot return null for non-nullable type""" → List(Pos(13, 17))) + """Cannot return null for non-nullable type""" -> List(Pos(4, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(5, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(6, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(7, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(8, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(9, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(10, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(11, 17)), + """Cannot return null for non-nullable type""" -> List(Pos(12, 17)), + """Cannot 
return null for non-nullable type""" -> List(Pos(13, 17))) ) } } diff --git a/src/test/scala/sangria/schema/CustomScalarSpec.scala b/src/test/scala/sangria/schema/CustomScalarSpec.scala index 061d1eca..6d1747a3 100644 --- a/src/test/scala/sangria/schema/CustomScalarSpec.scala +++ b/src/test/scala/sangria/schema/CustomScalarSpec.scala @@ -19,20 +19,20 @@ class CustomScalarSpec extends WordSpec with Matchers { case object DateCoercionViolation extends ValueCoercionViolation("Date value expected") def parseDate(s: String) = Try(dateFormat.parse(s)) match { - case Success(d) ⇒ Right(d) - case Failure(error) ⇒ Left(DateCoercionViolation) + case Success(d) => Right(d) + case Failure(error) => Left(DateCoercionViolation) } val DateType = ScalarType[Date]("Date", description = Some("An example of date scalar type"), - coerceOutput = (d, _) ⇒ dateFormat.format(d), + coerceOutput = (d, _) => dateFormat.format(d), coerceUserInput = { - case s: String ⇒ parseDate(s) - case _ ⇒ Left(DateCoercionViolation) + case s: String => parseDate(s) + case _ => Left(DateCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ parseDate(s) - case _ ⇒ Left(DateCoercionViolation) + case ast.StringValue(s, _, _, _, _) => parseDate(s) + case _ => Left(DateCoercionViolation) }) val DateArg = Argument("dateInput", DateType) @@ -40,7 +40,7 @@ class CustomScalarSpec extends WordSpec with Matchers { val QueryType = ObjectType("Query", fields[Unit, Unit]( Field("foo", DateType, arguments = DateArg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { val date: Date = ctx.arg(DateArg) new Date(date.getTime + 1000 * 60 * 60 * 24 * 5) }) @@ -54,7 +54,7 @@ class CustomScalarSpec extends WordSpec with Matchers { foo(dateInput: "2015-05-11") } """, - Map("data" → Map("foo" → "2015-05-16")) + Map("data" -> Map("foo" -> "2015-05-16")) ) checkContainsErrors(schema, (), @@ -64,7 +64,7 @@ class CustomScalarSpec extends WordSpec with Matchers { } """, null, - List("""Expected type 'Date!', found 
'"2015-05-test"'. Date value expected""" → List(Pos(3, 28))) + List("""Expected type 'Date!', found '"2015-05-test"'. Date value expected""" -> List(Pos(3, 28))) ) } } diff --git a/src/test/scala/sangria/schema/DefaultValueApplicationSpec.scala b/src/test/scala/sangria/schema/DefaultValueApplicationSpec.scala index 0eea7d50..702f3a41 100644 --- a/src/test/scala/sangria/schema/DefaultValueApplicationSpec.scala +++ b/src/test/scala/sangria/schema/DefaultValueApplicationSpec.scala @@ -15,7 +15,7 @@ import sangria.marshalling.sprayJson._ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResultSupport { "Default value application" should { "use default value if argument is not provided" in { - val assertion = (args: Args) ⇒ args.withArgs(AArg, SizeArg) { (test, size) ⇒ + val assertion = (args: Args) => args.withArgs(AArg, SizeArg) { (test, size) => test should be ("default") size should be (42) @@ -28,7 +28,7 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "use default value if argument is `null`" in { - val assertion = (args: Args) ⇒ args.withArgs(AArg) { test ⇒ + val assertion = (args: Args) => args.withArgs(AArg) { test => test should be ("default") args.argOpt(AArg) should be (None) @@ -41,7 +41,7 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "not use default value if value is provided" in { - val assertion = (args: Args) ⇒ args.withArgs(AArg) { test ⇒ + val assertion = (args: Args) => args.withArgs(AArg) { test => test should be ("bar") args.argOpt(AArg) should be (Some("bar")) @@ -53,7 +53,7 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "default variable value should not be used if variable is set to `null`" in { - check(graphql"""query ($$x: String = "bar") { test(a: $$x) }""", """{"x": null}""", args ⇒ { + check(graphql"""query ($$x: String = "bar") { test(a: $$x) }""", """{"x": null}""", args => { args.arg(AArg) should 
be ("bar") args.argOpt(AArg) should be (None) @@ -62,7 +62,7 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "default variable value should not be used if variable is not set" in { - check(graphql"""query ($$x: String = "bar") { test(a: $$x) }""", """{}""", args ⇒ { + check(graphql"""query ($$x: String = "bar") { test(a: $$x) }""", """{}""", args => { args.arg(AArg) should be ("bar") args.argOpt(AArg) should be (Some("bar")) @@ -71,8 +71,8 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "use default value if field is not provided" in { - val assertion = (args: Args) ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → Some("default"))) + val assertion = (args: Args) => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> Some("default"))) inpJson should be ("""{"f": "default"}""".parseJson) } @@ -81,8 +81,8 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "not use default value if field is `null`" in { - val assertion = (args: Args) ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → None)) + val assertion = (args: Args) => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> None)) inpJson should be ("""{"f": null}""".parseJson) } @@ -92,8 +92,8 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu } "not use default value if field value is provided" in { - val assertion = (args: Args) ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → Some("bar"))) + val assertion = (args: Args) => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> Some("bar"))) inpJson should be ("""{"f": "bar"}""".parseJson) } @@ -103,24 +103,24 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu "default variable value should not be used if 
variable is set to `null` for a field" in { check(graphql"""query ($$x: String = "bar") { testInp(inp: {f: $$x}, inpJson: {f: $$x}) }""", """{"x": null}""", - args ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → None)) + args => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> None)) inpJson should be ("""{"f": null}""".parseJson) }) } "default variable value should not be used if variable is not set for a field" in { check(graphql"""query ($$x: String = "bar") { testInp(inp: {f: $$x}, inpJson: {f: $$x}) }""", """{}""", - args ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → Some("bar"))) + args => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> Some("bar"))) inpJson should be ("""{"f": "bar"}""".parseJson) }) } "set fields to `null` if value is not set, but default is set to null" in { check(graphql"""query ($$x: String = "bar", $$y: String = null) { testInp(inp: {f: $$x, fo: $$y}, inpJson: {f: $$x, fo: $$y}) }""", """{}""", - args ⇒ args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) ⇒ - inp should be (Map("f" → Some("bar"), "fo" → None)) + args => args.withArgs(InpArg, InpJsonArg) { (inp, inpJson) => + inp should be (Map("f" -> Some("bar"), "fo" -> None)) inpJson should be ("""{"f": "bar", "fo": null}""".parseJson) }) } @@ -146,14 +146,14 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu val QueryType = ObjectType("Query", fields[Ctx, Unit]( Field("test", StringType, arguments = AArg :: SizeArg :: Nil, - resolve = c ⇒ { + resolve = c => { c.ctx.args = Some(c.args) "foo" }), Field("testInp", StringType, arguments = InpArg :: InpJsonArg :: Nil, - resolve = c ⇒ { + resolve = c => { c.ctx.args = Some(c.args) "foo" }) @@ -161,11 +161,11 @@ class DefaultValueApplicationSpec extends WordSpec with Matchers with FutureResu val schema = Schema(QueryType) - def check[T](query: Document, vars: String, 
assertions: Args ⇒ T): T = { + def check[T](query: Document, vars: String, assertions: Args => T): T = { val ctx = new Ctx Executor.execute(schema, query, ctx, variables = vars.parseJson).await assertions(ctx.args.getOrElse(fail("field was not used in query!"))) } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/schema/DefaultValuesSpec.scala b/src/test/scala/sangria/schema/DefaultValuesSpec.scala index 132794db..e2682d3f 100644 --- a/src/test/scala/sangria/schema/DefaultValuesSpec.scala +++ b/src/test/scala/sangria/schema/DefaultValuesSpec.scala @@ -22,7 +22,7 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport val QueryType = ObjectType("Query", fields[CaptureCtx, Unit]( Field("foo", StringType, arguments = arg :: Nil, - resolve = ctx ⇒ { + resolve = ctx => { ctx.ctx.arg = Some(ctx.arg[Any]("test")) "result" }) @@ -34,7 +34,7 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport val ctx = new CaptureCtx - Executor.execute(schema, query, userContext = ctx).await should be (JsObject("data" → JsObject("foo" → JsString("result")))) + Executor.execute(schema, query, userContext = ctx).await should be (JsObject("data" -> JsObject("foo" -> JsString("result")))) ctx.arg should be (Some(expectedResult)) @@ -54,12 +54,12 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport """ Executor.execute(schema, introspectionQuery, userContext = ctx).await should be ( - JsObject("data" → - JsObject("__schema" → - JsObject("queryType" → - JsObject("fields" → JsArray( - JsObject("args" → JsArray( - JsObject("defaultValue" → JsString(expectedDefault)))))))))) + JsObject("data" -> + JsObject("__schema" -> + JsObject("queryType" -> + JsObject("fields" -> JsArray( + JsObject("args" -> JsArray( + JsObject("defaultValue" -> JsString(expectedDefault)))))))))) } @@ -138,19 +138,19 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport expectedDefault = 
"[\"Hello\",\"World\"]") val ScalaInputType = complexInputType( - sharesDefault = scalaInput(Map("twitter" → 78)), - commentsDefault = scalaInput(List(Map("text" → "Foo"), Map("text" → "bar", "likes" → 3.2D)))) + sharesDefault = scalaInput(Map("twitter" -> 78)), + commentsDefault = scalaInput(List(Map("text" -> "Foo"), Map("text" -> "bar", "likes" -> 3.2D)))) "default scala complex object" in check( ScalaInputType, - defaultValue = scalaInput(Map("title" → "Post #1", "text" → "Amazing!", "comments" → List(Map("text" → "First! :P")))), + defaultValue = scalaInput(Map("title" -> "Post #1", "text" -> "Amazing!", "comments" -> List(Map("text" -> "First! :P")))), expectedResult = Map( - "title" → "Post #1", - "text" → Some("Amazing!"), - "tags" → Some(List("beginner", "scala")), - "views" → Some(12), - "shares" → Some(Map("twitter" → Some(78), "facebook" → Some(1))), - "comments" → Some(List(Map("author" → Some("anonymous"), "text" → "First! :P", "likes" → Some(1.5))))), + "title" -> "Post #1", + "text" -> Some("Amazing!"), + "tags" -> Some(List("beginner", "scala")), + "views" -> Some(12), + "shares" -> Some(Map("twitter" -> Some(78), "facebook" -> Some(1))), + "comments" -> Some(List(Map("author" -> Some("anonymous"), "text" -> "First! :P", "likes" -> Some(1.5))))), expectedDefault = "{title:\"Post #1\",text:\"Amazing!\",views:12,tags:[\"beginner\",\"scala\"],shares:{twitter:78,facebook:1},comments:[{author:\"anonymous\",text:\"First! 
:P\",likes:1.5}]}") "validate scalar default values" in { @@ -163,12 +163,12 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport "validate complex default values" in { val BrokenInputType = complexInputType( - sharesDefault = scalaInput(Map("facebook" → 78)), - commentsDefault = scalaInput(List(Map("text" → "Foo"), Map("likes" → 3.2D)))) + sharesDefault = scalaInput(Map("facebook" -> 78)), + commentsDefault = scalaInput(List(Map("text" -> "Foo"), Map("likes" -> 3.2D)))) a [SchemaValidationException] should be thrownBy check( BrokenInputType, - defaultValue = scalaInput(Map("text" → "Amazing!", "comments" → List(Map("text" → "First! :P")))), + defaultValue = scalaInput(Map("text" -> "Amazing!", "comments" -> List(Map("text" -> "First! :P")))), expectedResult = (), expectedDefault = "") } @@ -209,47 +209,47 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport expectedDefault = "[\"foo\",\"bar\"]") val JsonInputType = complexInputType( - sharesDefault = JsObject("twitter" → JsNumber(78)), + sharesDefault = JsObject("twitter" -> JsNumber(78)), commentsDefault = """[{"text": "Foo"}, {"text": "bar", "likes": 3.2}]""".parseJson) "default scala complex object" in check( JsonInputType, defaultValue = """{"title": "Post #1", "text": "Amazing!", "comments": [{"text": "First! :P"}]}""".parseJson, expectedResult = Map( - "title" → "Post #1", - "text" → Some("Amazing!"), - "tags" → Some(List("beginner", "scala")), - "views" → Some(12), - "shares" → Some(Map("twitter" → Some(78), "facebook" → Some(1))), - "comments" → Some(List(Map("author" → Some("anonymous"), "text" → "First! :P", "likes" → Some(1.5))))), + "title" -> "Post #1", + "text" -> Some("Amazing!"), + "tags" -> Some(List("beginner", "scala")), + "views" -> Some(12), + "shares" -> Some(Map("twitter" -> Some(78), "facebook" -> Some(1))), + "comments" -> Some(List(Map("author" -> Some("anonymous"), "text" -> "First! 
:P", "likes" -> Some(1.5))))), expectedDefault = "{title:\"Post #1\",text:\"Amazing!\",views:12,tags:[\"beginner\",\"scala\"],shares:{twitter:78,facebook:1},comments:[{author:\"anonymous\",text:\"First! :P\",likes:1.5}]}") "manual typeclass-based serialisation" in { implicit object SharesToInput extends ToInput[Shares, JsValue] { override def toInput(value: Shares) = { - val json = JsObject("twitter" → JsNumber(value.twitter), "facebook" → JsNumber(value.facebook)) + val json = JsObject("twitter" -> JsNumber(value.twitter), "facebook" -> JsNumber(value.facebook)) - json → sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller + json -> sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller } } implicit object CommentToInput extends ToInput[Comment, JsValue] { override def toInput(value: Comment) = { val json = JsObject( - "author" → JsString(value.author), - "text" → JsString(value.text), - "likes" → JsNumber(value.likes)) + "author" -> JsString(value.author), + "text" -> JsString(value.text), + "likes" -> JsNumber(value.likes)) - json → sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller + json -> sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller } } implicit def listToInput[T](implicit ev: ToInput[T, JsValue]): ToInput[List[T], JsValue] = new ToInput[List[T], JsValue] { override def toInput(value: List[T]) = { - val json = JsArray(value.toVector map ((v: T) ⇒ ev.toInput(v)._1)) + val json = JsArray(value.toVector map ((v: T) => ev.toInput(v)._1)) - json → sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller + json -> sangria.marshalling.sprayJson.SprayJsonInputUnmarshaller } } @@ -261,14 +261,14 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport CustomInputType, defaultValue = """{"title": "Post #1", "text": "Amazing!"}""".parseJson, expectedResult = Map( - "title" → "Post #1", - "text" → Some("Amazing!"), - "tags" → Some(List("beginner", "scala")), - "views" → Some(12), - "shares" → Some(Map("twitter" → 
Some(123), "facebook" → Some(456))), - "comments" → Some(List( - Map("author" → Some("John Doe"), "text" → "Nice post!", "likes" → Some(100)), - Map("author" → Some("Foo"), "text" → "Bar", "likes" → Some(0.1))))), + "title" -> "Post #1", + "text" -> Some("Amazing!"), + "tags" -> Some(List("beginner", "scala")), + "views" -> Some(12), + "shares" -> Some(Map("twitter" -> Some(123), "facebook" -> Some(456))), + "comments" -> Some(List( + Map("author" -> Some("John Doe"), "text" -> "Nice post!", "likes" -> Some(100)), + Map("author" -> Some("Foo"), "text" -> "Bar", "likes" -> Some(0.1))))), expectedDefault = "{title:\"Post #1\",text:\"Amazing!\",views:12,tags:[\"beginner\",\"scala\"],shares:{twitter:123,facebook:456},comments:[{author:\"John Doe\",text:\"Nice post!\",likes:100},{author:\"Foo\",text:\"Bar\",likes:0.1}]}") } @@ -289,16 +289,16 @@ class DefaultValuesSpec extends WordSpec with Matchers with FutureResultSupport CustomInputType, defaultValue = """{"title": "Post #1", "text": "Amazing!"}""".parseJson, expectedResult = Map( - "title" → "Post #1", - "text" → Some("Amazing!"), - "tags" → Some(List("beginner", "scala")), - "views" → Some(12), - "shares" → Some(Map("twitter" → Some(123), "facebook" → Some(456))), - "comments" → Some(List( - Map("author" → Some("John Doe"), "text" → "Nice post!", "likes" → Some(100)), - Map("author" → Some("Foo"), "text" → "Bar", "likes" → Some(0.1))))), + "title" -> "Post #1", + "text" -> Some("Amazing!"), + "tags" -> Some(List("beginner", "scala")), + "views" -> Some(12), + "shares" -> Some(Map("twitter" -> Some(123), "facebook" -> Some(456))), + "comments" -> Some(List( + Map("author" -> Some("John Doe"), "text" -> "Nice post!", "likes" -> Some(100)), + Map("author" -> Some("Foo"), "text" -> "Bar", "likes" -> Some(0.1))))), expectedDefault = "{title:\"Post #1\",text:\"Amazing!\",views:12,tags:[\"beginner\",\"scala\"],shares:{twitter:123,facebook:456},comments:[{author:\"John Doe\",text:\"Nice 
post!\",likes:100},{author:\"Foo\",text:\"Bar\",likes:0.1}]}") } } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/schema/EnumTypeSpec.scala b/src/test/scala/sangria/schema/EnumTypeSpec.scala index 68b81c0a..18d1744c 100644 --- a/src/test/scala/sangria/schema/EnumTypeSpec.scala +++ b/src/test/scala/sangria/schema/EnumTypeSpec.scala @@ -19,11 +19,11 @@ class EnumTypeSpec extends WordSpec with Matchers with GraphQlSupport { val QueryType = ObjectType("Query", fields[Unit, Unit]( Field("colorEnum", OptionType(ColorType), arguments = Args.fromEnum :: Args.fromInt :: Nil, - resolve = ctx ⇒ + resolve = ctx => ctx.args.arg(Args.fromInt).orElse(ctx.args.arg(Args.fromEnum))), Field("colorInt", OptionType(IntType), arguments = Args.fromEnum :: Args.fromInt :: Nil, - resolve = ctx ⇒ + resolve = ctx => ctx.args.arg(Args.fromInt).orElse(ctx.args.arg(Args.fromEnum))) )) @@ -45,71 +45,71 @@ class EnumTypeSpec extends WordSpec with Matchers with GraphQlSupport { "accepts enum literals as input" in check( (), "{ colorInt(fromEnum: GREEN) }", - Map("data" → Map("colorInt" → 1))) + Map("data" -> Map("colorInt" -> 1))) "enum may be output type" in check( (), "{ colorEnum(fromInt: 1) }", - Map("data" → Map("colorEnum" → "GREEN"))) + Map("data" -> Map("colorEnum" -> "GREEN"))) "enum may be both input and output type" in check( (), "{ colorEnum(fromEnum: GREEN) }", - Map("data" → Map("colorEnum" → "GREEN"))) + Map("data" -> Map("colorEnum" -> "GREEN"))) "does not accept string literals" in checkContainsErrors( (), """{ colorEnum(fromEnum: "GREEN") }""", - Map("colorEnum" → null), - List("Argument 'fromEnum' has wrong value: Enum value expected." → List(Pos(1, 3), Pos(1, 23))), + Map("colorEnum" -> null), + List("Argument 'fromEnum' has wrong value: Enum value expected." 
-> List(Pos(1, 3), Pos(1, 23))), validateQuery = false) "does not accept internal value in place of enum literal" in checkContainsErrors( (), """{ colorEnum(fromEnum: 1) }""", - Map("colorEnum" → null), - List("Argument 'fromEnum' has wrong value: Enum value expected." → List(Pos(1, 3), Pos(1, 23))), + Map("colorEnum" -> null), + List("Argument 'fromEnum' has wrong value: Enum value expected." -> List(Pos(1, 3), Pos(1, 23))), validateQuery = false) "accepts JSON string as enum variable" in check( (), """query test($color: Color!) { colorEnum(fromEnum: $color) }""", - Map("data" → Map("colorEnum" → "BLUE")), - JsObject("color" → JsString("BLUE"))) + Map("data" -> Map("colorEnum" -> "BLUE")), + JsObject("color" -> JsString("BLUE"))) "accepts enum literals as input arguments to mutations" in check( (), """mutation x($color: Color!) { favoriteEnum(fromEnum: $color) }""", - Map("data" → Map("favoriteEnum" → "GREEN")), - JsObject("color" → JsString("GREEN"))) + Map("data" -> Map("favoriteEnum" -> "GREEN")), + JsObject("color" -> JsString("GREEN"))) "accepts enum literals as input arguments to subscriptions" in check( (), """subscription x($color: Color!) { subscribeToEnum(fromEnum: $color) }""", - Map("data" → Map("subscribeToEnum" → "GREEN")), - JsObject("color" → JsString("GREEN"))) + Map("data" -> Map("subscribeToEnum" -> "GREEN")), + JsObject("color" -> JsString("GREEN"))) "does not accept internal value as enum variables" in checkContainsErrors( (), """query test($color: Color!) { colorEnum(fromEnum: $color) }""", null, - List("Variable '$color' expected value of type 'Color!' but got: 1. Reason: Enum value expected" → List(Pos(1, 12))), - JsObject("color" → JsNumber(1))) + List("Variable '$color' expected value of type 'Color!' but got: 1. Reason: Enum value expected" -> List(Pos(1, 12))), + JsObject("color" -> JsNumber(1))) "does not accept string variables as enum input" in checkContainsErrors( (), """query test($color: String!) 
{ colorEnum(fromEnum: $color) }""", null, - List("Variable '$color' of type 'String!' used in position expecting type 'Color'." → List(Pos(1, 12), Pos(1, 51))), - JsObject("color" → JsString("BLUE")), + List("Variable '$color' of type 'String!' used in position expecting type 'Color'." -> List(Pos(1, 12), Pos(1, 51))), + JsObject("color" -> JsString("BLUE")), validateQuery = true) "does not accept internal value variable as enum input" in checkContainsErrors( (), """query test($color: Int!) { colorEnum(fromEnum: $color) }""", null, - List("Variable '$color' of type 'Int!' used in position expecting type 'Color'." → List(Pos(1, 12), Pos(1, 48))), - JsObject("color" → JsNumber(2)), + List("Variable '$color' of type 'Int!' used in position expecting type 'Color'." -> List(Pos(1, 12), Pos(1, 48))), + JsObject("color" -> JsNumber(2)), validateQuery = true) "enum value may have an internal value of 0" in check( @@ -120,9 +120,9 @@ class EnumTypeSpec extends WordSpec with Matchers with GraphQlSupport { colorInt(fromEnum: RED) } """, - Map("data" → Map( - "colorEnum" → "RED", - "colorInt" → 0))) + Map("data" -> Map( + "colorEnum" -> "RED", + "colorInt" -> 0))) "enum inputs may be nullable" in check( (), @@ -132,8 +132,8 @@ class EnumTypeSpec extends WordSpec with Matchers with GraphQlSupport { colorInt } """, - Map("data" → Map( - "colorEnum" → null, - "colorInt" → null))) + Map("data" -> Map( + "colorEnum" -> null, + "colorInt" -> null))) } } diff --git a/src/test/scala/sangria/schema/IntrospectionSchemaMaterializerSpec.scala b/src/test/scala/sangria/schema/IntrospectionSchemaMaterializerSpec.scala index b499ee99..482f7de3 100644 --- a/src/test/scala/sangria/schema/IntrospectionSchemaMaterializerSpec.scala +++ b/src/test/scala/sangria/schema/IntrospectionSchemaMaterializerSpec.scala @@ -33,76 +33,76 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu clientSchema } - lazy val RecursiveType: ObjectType[Any, Any] = ObjectType("Recur", () ⇒ 
fields( - Field("recur", OptionType(RecursiveType), resolve = _ ⇒ None) + lazy val RecursiveType: ObjectType[Any, Any] = ObjectType("Recur", () => fields( + Field("recur", OptionType(RecursiveType), resolve = _ => None) )) - lazy val DogType: ObjectType[Any, Any] = ObjectType("Dog", () ⇒ fields( - Field("bestFriend", OptionType(HumanType), resolve = _ ⇒ None) + lazy val DogType: ObjectType[Any, Any] = ObjectType("Dog", () => fields( + Field("bestFriend", OptionType(HumanType), resolve = _ => None) )) - lazy val HumanType: ObjectType[Any, Any] = ObjectType("Human", () ⇒ fields( - Field("bestFriend", OptionType(DogType), resolve = _ ⇒ None) + lazy val HumanType: ObjectType[Any, Any] = ObjectType("Human", () => fields( + Field("bestFriend", OptionType(DogType), resolve = _ => None) )) - lazy val FriendlyType: InterfaceType[Any, Any] = InterfaceType("Friendly", () ⇒ fields( - Field("bestFriend", OptionType(FriendlyType), Some("The best friend of this friendly thing"), resolve = _ ⇒ None) + lazy val FriendlyType: InterfaceType[Any, Any] = InterfaceType("Friendly", () => fields( + Field("bestFriend", OptionType(FriendlyType), Some("The best friend of this friendly thing"), resolve = _ => None) )) - lazy val DogUnionType: ObjectType[Any, Any] = ObjectType("Dog", () ⇒ fields( - Field("bestFriend", OptionType(FriendlyUnionType), resolve = _ ⇒ None) + lazy val DogUnionType: ObjectType[Any, Any] = ObjectType("Dog", () => fields( + Field("bestFriend", OptionType(FriendlyUnionType), resolve = _ => None) )) - lazy val HumanUnionType: ObjectType[Any, Any] = ObjectType("Human", () ⇒ fields( - Field("bestFriend", OptionType(FriendlyUnionType), resolve = _ ⇒ None) + lazy val HumanUnionType: ObjectType[Any, Any] = ObjectType("Human", () => fields( + Field("bestFriend", OptionType(FriendlyUnionType), resolve = _ => None) )) lazy val FriendlyUnionType = UnionType("Friendly", types = DogUnionType :: HumanUnionType :: Nil) val CustomScalar = ScalarType[Int]("Custom", description = 
Some("Some custom"), - coerceOutput = (i, _) ⇒ ast.IntValue(i), + coerceOutput = (i, _) => ast.IntValue(i), coerceUserInput = { - case i: Int ⇒ Right(i) - case _ ⇒ Left(IntCoercionViolation) + case i: Int => Right(i) + case _ => Left(IntCoercionViolation) }, coerceInput = { - case ast.IntValue(i, _, _) ⇒ Right(i) - case _ ⇒ Left(IntCoercionViolation) + case ast.IntValue(i, _, _) => Right(i) + case _ => Left(IntCoercionViolation) }) "Type System: build schema from introspection" should { "builds a simple schema" in testSchema( Schema(ObjectType("Simple", "This is a simple type", fields[Any, Any]( - Field("string", OptionType(StringType), Some("This is a string field"), resolve = _ ⇒ "foo"))))) + Field("string", OptionType(StringType), Some("This is a string field"), resolve = _ => "foo"))))) "builds a simple schema with all operation types" in testSchema( Schema( query = ObjectType("QueryType", "This is a simple query type", fields[Any, Any]( - Field("string", OptionType(StringType), Some("This is a string field"), resolve = _ ⇒ "foo"))), + Field("string", OptionType(StringType), Some("This is a string field"), resolve = _ => "foo"))), mutation = Some( ObjectType("MutationType", "This is a simple mutation type", fields[Any, Any]( Field("setString", OptionType(StringType), Some("Set the string field"), arguments = Argument("value", OptionInputType(StringType)) :: Nil, - resolve = _ ⇒ "foo")))), + resolve = _ => "foo")))), subscription = Some( ObjectType("SubscriptionType", "This is a simple subscription type", fields[Any, Any]( - Field("string", OptionType(StringType), Some("This is a string field for sub"), resolve = _ ⇒ "foo")))) + Field("string", OptionType(StringType), Some("This is a string field for sub"), resolve = _ => "foo")))) )) "uses built-in scalars when possible" in { val clientSchema = testSchema( Schema(ObjectType("Scalars", fields[Any, Any]( - Field("int", IntType, resolve = _ ⇒ 1), - Field("long", LongType, resolve = _ ⇒ 1L), - Field("float", 
FloatType, resolve = _ ⇒ 1.1), - Field("bool", BooleanType, resolve = _ ⇒ true), - Field("bigInt", BigIntType, resolve = _ ⇒ BigInt(1)), - Field("bigDec", BigDecimalType, resolve = _ ⇒ BigDecimal(1.0)), - Field("id", IDType, resolve = _ ⇒ "foo"), - Field("custom", CustomScalar, resolve = _ ⇒ 123), - Field("string", StringType, resolve = _ ⇒ "foo"))))) + Field("int", IntType, resolve = _ => 1), + Field("long", LongType, resolve = _ => 1L), + Field("float", FloatType, resolve = _ => 1.1), + Field("bool", BooleanType, resolve = _ => true), + Field("bigInt", BigIntType, resolve = _ => BigInt(1)), + Field("bigDec", BigDecimalType, resolve = _ => BigDecimal(1.0)), + Field("id", IDType, resolve = _ => "foo"), + Field("custom", CustomScalar, resolve = _ => 123), + Field("string", StringType, resolve = _ => "foo"))))) def fieldType(fieldName: String) = clientSchema.outputTypes("Scalars").asInstanceOf[ObjectType[_, _]].getField(clientSchema, fieldName).head.fieldType @@ -123,42 +123,42 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu "builds a schema with an interface" in { val dog = ObjectType("Dog", interfaces[Any, Any](FriendlyType), fields[Any, Any]( - Field("bestFriend", OptionType(FriendlyType), resolve = _ ⇒ None) + Field("bestFriend", OptionType(FriendlyType), resolve = _ => None) )) lazy val human = ObjectType("Human", interfaces[Any, Any](FriendlyType), fields[Any, Any]( - Field("bestFriend", OptionType(FriendlyType), resolve = _ ⇒ None) + Field("bestFriend", OptionType(FriendlyType), resolve = _ => None) )) testSchema(Schema( query = ObjectType("WithInterface", fields[Any, Any]( - Field("friendly", OptionType(FriendlyType), resolve = _ ⇒ None))), + Field("friendly", OptionType(FriendlyType), resolve = _ => None))), additionalTypes = dog :: human :: Nil)) } "builds a schema with a union" in testSchema( Schema(ObjectType("WithUnion", fields[Any, Any]( - Field("friendly", OptionType(FriendlyUnionType), resolve = _ ⇒ None))))) + 
Field("friendly", OptionType(FriendlyUnionType), resolve = _ => None))))) "builds a schema with complex field values" in testSchema( Schema(ObjectType("ComplexFields", fields[Any, Any]( - Field("string", OptionType(StringType), resolve = _ ⇒ None), - Field("listOfString", OptionType(ListType(OptionType(StringType))), resolve = _ ⇒ None), - Field("nonNullString", StringType, resolve = _ ⇒ "foo"), - Field("nonNullListOfString", ListType(OptionType(StringType)), resolve = _ ⇒ Nil), - Field("nonNullListOfNonNullString", ListType(StringType), resolve = _ ⇒ Nil))))) + Field("string", OptionType(StringType), resolve = _ => None), + Field("listOfString", OptionType(ListType(OptionType(StringType))), resolve = _ => None), + Field("nonNullString", StringType, resolve = _ => "foo"), + Field("nonNullListOfString", ListType(OptionType(StringType)), resolve = _ => Nil), + Field("nonNullListOfNonNullString", ListType(StringType), resolve = _ => Nil))))) "builds a schema with field arguments" in testSchema( Schema(ObjectType("ArgFields", fields[Any, Any]( Field("one", OptionType(StringType), Some("A field with a single arg"), arguments = Argument("intArg", OptionInputType(IntType), description = "This is an int arg") :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("two", OptionType(StringType), Some("A field with a two args"), arguments = Argument("listArg", OptionInputType(ListInputType(OptionInputType(IntType))), description = "This is an list of int arg") :: Argument("requiredArg", BooleanType, description = "This is a required arg") :: Nil, - resolve = _ ⇒ None))))) + resolve = _ => None))))) "builds a schema with an enum" in { val foodType = EnumType("Food", Some("Varieties of food stuffs"), List( @@ -172,7 +172,7 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu Schema(ObjectType("EnumFields", fields[Any, Any]( Field("food", OptionType(foodType), Some("Repeats the arg you give it"), arguments = Argument("kind", 
OptionInputType(foodType), description = "what kind of food?") :: Nil, - resolve = _ ⇒ None))))) + resolve = _ => None))))) clientSchema.allTypes("Food") shouldNot be theSameInstanceAs foodType @@ -194,7 +194,7 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu Schema(ObjectType("HasInputObjectFields", fields[Any, Any]( Field("geocode", OptionType(StringType), Some("Get a geocode from an address"), arguments = Argument("address", OptionInputType(addressType), description = "The address to lookup") :: Nil, - resolve = _ ⇒ None))))) + resolve = _ => None))))) } "builds a schema with field arguments with default values" in { @@ -206,41 +206,41 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu Schema(ObjectType("ArgFields", fields[Any, Any]( Field("defaultInt", OptionType(StringType), arguments = Argument("intArg", OptionInputType(IntType), 10) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("defaultList", OptionType(StringType), arguments = Argument("listArg", OptionInputType(ListInputType(OptionInputType(IntType))), scalaInput(Vector(1, 2, 3))) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("defaultObject", OptionType(StringType), - arguments = Argument("objArg", OptionInputType(geoType), scalaInput(Map("lat" → 37.485D, "lon" → -122.148D))) :: Nil, - resolve = _ ⇒ None))))) + arguments = Argument("objArg", OptionInputType(geoType), scalaInput(Map("lat" -> 37.485D, "lon" -> -122.148D))) :: Nil, + resolve = _ => None))))) } "builds a schema with custom directives" in testSchema( Schema( query = ObjectType("Simple", "This is a simple type", fields[Any, Any]( Field("string", OptionType(StringType), Some("This is a string field"), - resolve = _ ⇒ None))), + resolve = _ => None))), directives = BuiltinDirectives ++ List(Directive("customDirective", Some("This is a custom directive"), - shouldInclude = _ ⇒ true, + shouldInclude = _ => true, locations = 
Set(DirectiveLocation.Field))))) "builds a schema aware of deprecation" in testSchema( Schema(ObjectType("Simple", "This is a simple type", fields[Any, Any]( Field("shinyString", OptionType(StringType), Some("This is a shiny string field"), - resolve = _ ⇒ None), + resolve = _ => None), Field("deprecatedString", OptionType(StringType), Some("This is a deprecated string field"), deprecationReason = Some("Use shinyString"), - resolve = _ ⇒ None), + resolve = _ => None), Field("color", fieldType = OptionType(EnumType("Color", values = List( EnumValue("RED", Some("So rosy"), "RED"), EnumValue("GREEN", Some("So grassy"), "GREEN"), EnumValue("BLUE", Some("So calming"), "BLUE"), EnumValue("MAUVE", Some("So sickening"), "MAUVE", deprecationReason = Some("No longer in fashion"))))), - resolve = _ ⇒ None))))) + resolve = _ => None))))) "builds a schema with description" in testSchema( - Schema(ObjectType("Simple", "This is a simple type", fields[Any, Any](Field("shinyString", OptionType(StringType), resolve = _ ⇒ None))), + Schema(ObjectType("Simple", "This is a simple type", fields[Any, Any](Field("shinyString", OptionType(StringType), resolve = _ => None))), description = Some("test"))) "cannot use client schema for general execution" in { @@ -251,12 +251,12 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu Argument("custom1", OptionInputType(CustomScalar)) :: Argument("custom2", OptionInputType(CustomScalar)) :: Nil, - resolve = _ ⇒ None))))) + resolve = _ => None))))) checkContainsErrors(clientSchema, (), "query NoNo($v: Custom) { foo(custom1: 123, custom2: $v) }", null, - List("""Schema was materialized and cannot be used for any queries except introspection queries.""" → List(Pos(1, 39))), + List("""Schema was materialized and cannot be used for any queries except introspection queries.""" -> List(Pos(1, 39))), args = """{"v": 456}""".parseJson ) } @@ -270,45 +270,45 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with 
Matchers with Fu Argument("custom1", OptionInputType(CustomScalar)) :: Argument("custom2", OptionInputType(CustomScalar)) :: Nil, - resolve = _ ⇒ None)))) + resolve = _ => None)))) val initialIntrospection = Executor.execute(serverSchema, introspectionQuery).await val customBuilder = new DefaultIntrospectionSchemaBuilder[Unit] { override def resolveField(typeDefinition: IntrospectionType, definition: IntrospectionField) = - ctx ⇒ (ctx.parentType.name, ctx.field.name) match { - case ("Query", "foo") ⇒ + ctx => (ctx.parentType.name, ctx.field.name) match { + case ("Query", "foo") => for { - a ← ctx.argOpt[Int]("custom1") - b ← ctx.argOpt[Int]("custom2") + a <- ctx.argOpt[Int]("custom1") + b <- ctx.argOpt[Int]("custom2") } yield a + b - case _ ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + case _ => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException } override def scalarCoerceUserInput(definition: IntrospectionScalarType) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case i: Int ⇒ Right(i) - case i: BigInt ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => definition.name match { + case "Custom" => value match { + case i: Int => Right(i) + case i: BigInt => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceInput(definition: IntrospectionScalarType) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => definition.name match { + case "Custom" => value match { + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ 
Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceOutput(definition: IntrospectionScalarType) = - (coerced, _) ⇒ definition.name match { - case "Custom" ⇒ ast.IntValue(coerced.asInstanceOf[Int]) - case _ ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + (coerced, _) => definition.name match { + case "Custom" => ast.IntValue(coerced.asInstanceOf[Int]) + case _ => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException } } @@ -316,7 +316,7 @@ class IntrospectionSchemaMaterializerSpec extends WordSpec with Matchers with Fu check(clientSchema, (), "query Yeah($v: Custom) { foo(custom1: 123, custom2: $v) }", - Map("data" → Map("foo" → 579)), + Map("data" -> Map("foo" -> 579)), """{"v": 456}""".parseJson ) } diff --git a/src/test/scala/sangria/schema/ResolverBasedAstSchemaBuilderSpec.scala b/src/test/scala/sangria/schema/ResolverBasedAstSchemaBuilderSpec.scala index d0bbe8c0..619cad08 100644 --- a/src/test/scala/sangria/schema/ResolverBasedAstSchemaBuilderSpec.scala +++ b/src/test/scala/sangria/schema/ResolverBasedAstSchemaBuilderSpec.scala @@ -22,20 +22,20 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu case object UUIDViolation extends BaseViolation("Invalid UUID") def parseUuid(s: String) = Try(UUID.fromString(s)) match { - case Success(s) ⇒ Right(s) - case Failure(e) ⇒ Left(UUIDViolation) + case Success(s) => Right(s) + case Failure(e) => Left(UUIDViolation) } val UUIDType = ScalarType[UUID]("UUID", - coerceOutput = (v, _) ⇒ v.toString, + coerceOutput = (v, _) => v.toString, coerceUserInput = { - case s: String ⇒ parseUuid(s) - case _ ⇒ Left(UUIDViolation) + case s: String => parseUuid(s) + case _ => Left(UUIDViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ parseUuid(s) - case _ ⇒ Left(UUIDViolation) + case ast.StringValue(s, _, _, _, _) => parseUuid(s) + 
case _ => Left(UUIDViolation) }) "ResolverBasedAstSchemaBuilder" should { @@ -54,7 +54,7 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu def intValidationAlias(min: Option[Int], max: Option[Int]) = ScalarAlias[Int, Int](IntType, toScalar = identity, - fromScalar = v ⇒ { + fromScalar = v => { if (min.isDefined && v < min.get) Left(CustomIntViolation(v, min, max)) else if (max.isDefined && v > max.get) Left(CustomIntViolation(v, min, max)) else Right(v) @@ -68,18 +68,18 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu val builder = resolverBased[Any]( AdditionalTypes(UUIDType), - DirectiveFieldProvider(AddExtraFieldsDir, c ⇒ List(MaterializedField(c.origin, + DirectiveFieldProvider(AddExtraFieldsDir, c => List(MaterializedField(c.origin, ast.FieldDefinition("extraField", ast.NamedType("Int"), Vector.empty)))), - DynamicDirectiveFieldProvider[Any, JsValue]("addExtraDynFields", c ⇒ List(MaterializedField(c.origin, + DynamicDirectiveFieldProvider[Any, JsValue]("addExtraDynFields", c => List(MaterializedField(c.origin, Field("extraDynField", c.materializer.getScalarType(c.origin, ast.NamedType("String")), - resolve = (_: Context[Any, Any]) ⇒ "foo")))), + resolve = (_: Context[Any, Any]) => "foo")))), AdditionalDirectives(Seq(NumDir)), - DirectiveInputTypeResolver(ValidateIntDir, c ⇒ c.withArgs(MinArg, MaxArg)((min, max) ⇒ + DirectiveInputTypeResolver(ValidateIntDir, c => c.withArgs(MinArg, MaxArg)((min, max) => c.inputType(c.definition.valueType, intValidationAlias(min, max)))), - DirectiveScalarResolver(CoolDir, _ ⇒ StringType), + DirectiveScalarResolver(CoolDir, _ => StringType), DirectiveResolver(TestDir, resolve = _.arg(ValueArg)), DynamicDirectiveResolver[Any, JsValue]("json", resolve = _.args), - FieldResolver {case (TypeName("Query"), FieldName("id")) ⇒ _ ⇒ UUID.fromString("a26bdfd4-0fcf-484f-b363-585091b3319f")}, + FieldResolver {case (TypeName("Query"), FieldName("id")) => _ => 
UUID.fromString("a26bdfd4-0fcf-484f-b363-585091b3319f")}, LegacyCommentDescriptionsResolver(), AnyFieldResolver.defaultInput[Any, JsValue]) @@ -109,7 +109,7 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """ val collectedValue = schemaAst.analyzer.resolveDirectives( - GenericDirectiveResolver(NumDir, resolve = c ⇒ Some(c arg NVArg))).sum + GenericDirectiveResolver(NumDir, resolve = c => Some(c arg NVArg))).sum collectedValue should be (145) @@ -251,23 +251,23 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu val AddFinalDir = Directive("addFinal", locations = Set(DL.Schema, DL.FieldDefinition)) val builder = resolverBased[Any]( - DirectiveResolver(ConstDir, c ⇒ c.arg(ValueArg)), - DirectiveResolver(AddDir, c ⇒ c.withArgs(ValueArg) { value ⇒ + DirectiveResolver(ConstDir, c => c.arg(ValueArg)), + DirectiveResolver(AddDir, c => c.withArgs(ValueArg) { value => c.lastValue match { - case Some(last) ⇒ last.map(_ + value) - case None ⇒ value + case Some(last) => last.map(_ + value) + case None => value } }), DirectiveResolver(AddFinalDir, - c ⇒ { + c => { val finalValue = c.ctx.arg[String]("final") c.lastValue match { - case Some(last) ⇒ last.map(_ + finalValue) - case None ⇒ finalValue + case Some(last) => last.map(_ + finalValue) + case None => finalValue } }, - complexity = Some(_ ⇒ (_, _, _) ⇒ 100.0))) + complexity = Some(_ => (_, _, _) => 100.0))) val schemaAst = gql""" @@ -288,13 +288,13 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """ val complexity = new AtomicInteger(0) - val reducer = QueryReducer.measureComplexity[Any]((c, _) ⇒ complexity.set(c.toInt)) + val reducer = QueryReducer.measureComplexity[Any]((c, _) => complexity.set(c.toInt)) Executor.execute(schema, query, queryReducers = reducer :: Nil).await should be ( Map( - "data" → Map( - "myStr" → "first-second-last", - "myStr1" → "realFirst-second"))) + "data" -> Map( + "myStr" -> 
"first-second-last", + "myStr1" -> "realFirst-second"))) complexity.get should be (200) } @@ -302,13 +302,13 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu "resolve enum values" in { val builder = resolverBased[Any]( SimpleEnumValueResolver { - case (TypeName("Color"), v) if v.name == "RED" ⇒ "#FF0000" - case (TypeName("Color"), v) if v.name == "GREEN" ⇒ "#00FF00" - case (TypeName("Color"), v) if v.name == "BLUE" ⇒ "#0000FF" + case (TypeName("Color"), v) if v.name == "RED" => "#FF0000" + case (TypeName("Color"), v) if v.name == "GREEN" => "#00FF00" + case (TypeName("Color"), v) if v.name == "BLUE" => "#0000FF" }, FieldResolver { - case (TypeName("Mutation"), FieldName("eat")) ⇒ - ctx ⇒ "tasty " + ctx.arg[String]("color") + " " + ctx.arg[InputObjectType.DefaultInput]("fruit")("color") + case (TypeName("Mutation"), FieldName("eat")) => + ctx => "tasty " + ctx.arg[String]("color") + " " + ctx.arg[InputObjectType.DefaultInput]("fruit")("color") }) val schemaAst = @@ -329,9 +329,9 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu val existingSchema = Schema( query = ObjectType("Query", fields[Any, Unit]( - Field("testQuery", StringType, resolve = _ ⇒ "test"))), + Field("testQuery", StringType, resolve = _ => "test"))), mutation = Some(ObjectType("Mutation", fields[Any, Unit]( - Field("testMut", StringType, resolve = _ ⇒ "test"))))) + Field("testMut", StringType, resolve = _ => "test"))))) val schema = existingSchema.extend(schemaAst, builder.validateSchemaWithException(schemaAst)) @@ -344,9 +344,9 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """ Executor.execute(schema, query).await should be ( - Map("data" → Map( - "testMut" → "test", - "eat" → "tasty #00FF00 #FF0000"))) + Map("data" -> Map( + "testMut" -> "test", + "eat" -> "tasty #00FF00 #FF0000"))) val queryWithVars = gql""" @@ -357,40 +357,40 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with 
Matchers with Futu """ val vars = InputUnmarshaller.mapVars( - "color1" → "RED", - "color2" → "BLUE", - "fruit" → Map( - "name" → "Banana", - "color" → "GREEN")) + "color1" -> "RED", + "color2" -> "BLUE", + "fruit" -> Map( + "name" -> "Banana", + "color" -> "GREEN")) Executor.execute(schema, queryWithVars, variables = vars).await should be ( - Map("data" → Map( - "eat" → "tasty #0000FF #FF0000", - "more" → "tasty #FF0000 #00FF00"))) + Map("data" -> Map( + "eat" -> "tasty #0000FF #FF0000", + "more" -> "tasty #FF0000 #00FF00"))) } "resolve fields based on the dynamic directives" in { import sangria.marshalling.sprayJson._ val builder = resolverBased[Any]( - DynamicDirectiveResolver[Any, JsValue]("add", c ⇒ c.args.asJsObject.fields("value") match { - case JsString(str) ⇒ + DynamicDirectiveResolver[Any, JsValue]("add", c => c.args.asJsObject.fields("value") match { + case JsString(str) => c.lastValue match { - case Some(last) ⇒ last.map(_ + str) - case None ⇒ str + case Some(last) => last.map(_ + str) + case None => str } - case _ ⇒ c.lastValue.getOrElse("") + case _ => c.lastValue.getOrElse("") }), DynamicDirectiveResolver[Any, JsValue]("addFinal", - c ⇒ { + c => { val finalValue = c.ctx.arg[String]("final") c.lastValue match { - case Some(last) ⇒ last.map(_ + finalValue) - case None ⇒ finalValue + case Some(last) => last.map(_ + finalValue) + case None => finalValue } }, - complexity = Some(_ ⇒ (_, _, _) ⇒ 100.0))) + complexity = Some(_ => (_, _, _) => 100.0))) val schemaAst = gql""" @@ -411,7 +411,7 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """ val complexity = new AtomicInteger(0) - val reducer = QueryReducer.measureComplexity[Any]((c, _) ⇒ complexity.set(c.toInt)) + val reducer = QueryReducer.measureComplexity[Any]((c, _) => complexity.set(c.toInt)) Executor.execute(schema, query, queryReducers = reducer :: Nil).await should be ( """ @@ -429,25 +429,25 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with 
Matchers with Futu "resolve fields based on names" in { val builder = resolverBased[Unit]( FieldResolver { - case (TypeName("Query"), field @ FieldName(fieldName)) if fieldName startsWith "test" ⇒ - c ⇒ c.arg[Int](field.arguments.head.name) + 1 + case (TypeName("Query"), field @ FieldName(fieldName)) if fieldName startsWith "test" => + c => c.arg[Int](field.arguments.head.name) + 1 }, FieldResolver.map( - "Query" → Map( - "a" → (_ ⇒ "a value"), - "b" → (_ ⇒ "b value"))), + "Query" -> Map( + "a" -> (_ => "a value"), + "b" -> (_ => "b value"))), ExistingFieldResolver { - case (_, _, field) if field.name startsWith "existing" ⇒ - c ⇒ "replacement" + case (_, _, field) if field.name startsWith "existing" => + c => "replacement" }, ExistingFieldResolver.map( - "Query" → Map( - "c" → (_ ⇒ "c value")))) + "Query" -> Map( + "c" -> (_ => "c value")))) val existingSchema = Schema(ObjectType("Query", fields[Unit, Unit]( - Field("simple", StringType, resolve = _ ⇒ "value"), - Field("c", StringType, resolve = _ ⇒ "c value"), - Field("existingField", StringType, resolve = _ ⇒ "foo")))) + Field("simple", StringType, resolve = _ => "value"), + Field("c", StringType, resolve = _ => "c value"), + Field("existingField", StringType, resolve = _ => "foo")))) val schemaAst = gql""" @@ -475,14 +475,14 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """ Executor.execute(schema, query).await should be (Map( - "data" → Map( - "simple" → "value", - "existingField" → "replacement", - "testOne" → 124, - "testTwo" → 2, - "a" → "a value", - "b" → "b value", - "c" → "c value"))) + "data" -> Map( + "simple" -> "value", + "existingField" -> "replacement", + "testOne" -> 124, + "testTwo" -> 2, + "a" -> "a value", + "b" -> "b value", + "c" -> "c value"))) } "support instance check" in { @@ -490,11 +490,11 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu val builder = resolverBased[Unit]( InstanceCheck.simple { - case value: 
JsValue if value.asJsObject.fields.contains("type") ⇒ + case value: JsValue if value.asJsObject.fields.contains("type") => value.asJsObject.fields("type").asInstanceOf[JsString].value - case value: JsValue if value.asJsObject.fields.contains("name") ⇒ + case value: JsValue if value.asJsObject.fields.contains("name") => "Dog" - case _ ⇒ + case _ => "Cat" }, AnyFieldResolver.defaultInput[Unit, JsValue]) @@ -746,9 +746,9 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu val violations = builder.validateSchema(schemaDocument) assertViolations(violations, - "Directive 'objectDir' may not be used on field definition." → Seq(Pos(7, 29)), - "Expected type 'String', found 'true'. String value expected" → Seq(Pos(6, 46)), - "Directive 'objectDir' may not be used on input object type extension definition." → Seq(Pos(10, 28))) + "Directive 'objectDir' may not be used on field definition." -> Seq(Pos(7, 29)), + "Expected type 'String', found 'true'. String value expected" -> Seq(Pos(6, 46)), + "Directive 'objectDir' may not be used on input object type extension definition." 
-> Seq(Pos(10, 28))) } "support generic InputTypeResolver/OutputTypeResolver" in { @@ -758,20 +758,20 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu case object EmptyIdError extends Exception("ID cannot be an empty string") with UserFacingError val MyIdType = ScalarAlias[String, String](IDType, - toScalar = s ⇒ + toScalar = s => if (s.trim.isEmpty) throw EmptyIdError // sanity check else s, - fromScalar = id ⇒ + fromScalar = id => if (id.trim.isEmpty) Left(EmptyIdViolation) else Right(id)) val builder = resolverBased[Unit]( InputTypeResolver { - case c if c.definition.valueType.namedType.name == "ID" ⇒ + case c if c.definition.valueType.namedType.name == "ID" => c.inputType(c.definition.valueType, MyIdType) }, OutputTypeResolver { - case c if c.fieldDefinition.fieldType.namedType.name == "ID" ⇒ + case c if c.fieldDefinition.fieldType.namedType.name == "ID" => c.outputType(c.fieldDefinition.fieldType, MyIdType) }, AnyFieldResolver.defaultInput[Unit, JsValue]) @@ -811,12 +811,12 @@ class ResolverBasedAstSchemaBuilderSpec extends WordSpec with Matchers with Futu """.parseJson val vars = InputUnmarshaller.mapVars( - "id" → " ") + "id" -> " ") checkContainsViolations( Executor.execute(schema, query, variables = vars, root = data).await, - "Expected type 'ID!', found '\"\"'. ID cannot be an empty string" → Seq(Pos(3, 35)), - "Expected type 'ID!', found '\" \"'. ID cannot be an empty string" → Seq(Pos(3, 39))) + "Expected type 'ID!', found '\"\"'. ID cannot be an empty string" -> Seq(Pos(3, 35)), + "Expected type 'ID!', found '\" \"'. 
ID cannot be an empty string" -> Seq(Pos(3, 39))) val query1 = gql""" diff --git a/src/test/scala/sangria/schema/SchemaComparatorSpec.scala b/src/test/scala/sangria/schema/SchemaComparatorSpec.scala index 97f7d0a4..7418081e 100644 --- a/src/test/scala/sangria/schema/SchemaComparatorSpec.scala +++ b/src/test/scala/sangria/schema/SchemaComparatorSpec.scala @@ -12,13 +12,13 @@ import scala.reflect.ClassTag class SchemaComparatorSpec extends WordSpec with Matchers { "SchemaComparator" should { val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("field1", OptionType(StringType), resolve = _ ⇒ "foo"))) + Field("field1", OptionType(StringType), resolve = _ => "foo"))) "should detect if a type was removed or added" in { val type1 = ObjectType("Type1", fields[Unit, Unit]( - Field("field1", OptionType(StringType), resolve = _ ⇒ "foo"))) + Field("field1", OptionType(StringType), resolve = _ => "foo"))) val type2 = ObjectType("Type2", fields[Unit, Unit]( - Field("field1", OptionType(StringType), resolve = _ ⇒ "foo"))) + Field("field1", OptionType(StringType), resolve = _ => "foo"))) val oldSchema = Schema(QueryType, additionalTypes = type1 :: type2 :: Nil) val newSchema = Schema(QueryType, additionalTypes = type2 :: Nil) @@ -634,16 +634,16 @@ class SchemaComparatorSpec extends WordSpec with Matchers { } def assertChanges(actualChanges: Vector[SchemaChange], expectedChanges: (Class[_], String, Boolean)*) = { - val actualRendered = actualChanges.map(c ⇒ s" * ${c.getClass.getSimpleName}: ${c.description}${if (c.breakingChange) " (breaking)" else ""}").mkString("\n") + val actualRendered = actualChanges.map(c => s" * ${c.getClass.getSimpleName}: ${c.description}${if (c.breakingChange) " (breaking)" else ""}").mkString("\n") withClue(s"Actual changes:\n$actualRendered\n") { actualChanges should have size expectedChanges.size - val notFound = expectedChanges.filter(expectedChange ⇒ - !actualChanges.exists(ac ⇒ expectedChange._1.isAssignableFrom(ac.getClass) && 
ac.description == expectedChange._2 && ac.breakingChange == expectedChange._3)) + val notFound = expectedChanges.filter(expectedChange => + !actualChanges.exists(ac => expectedChange._1.isAssignableFrom(ac.getClass) && ac.description == expectedChange._2 && ac.breakingChange == expectedChange._3)) if (notFound.nonEmpty) { - val str = notFound.map(nf ⇒ s" * ${nf._1.getSimpleName}: ${nf._2}${if (nf._3) " (breaking)" else ""}").mkString("\n") + val str = notFound.map(nf => s" * ${nf._1.getSimpleName}: ${nf._2}${if (nf._3) " (breaking)" else ""}").mkString("\n") fail(s"Changes not found:\n $str") } diff --git a/src/test/scala/sangria/schema/SchemaConstraintsSpec.scala b/src/test/scala/sangria/schema/SchemaConstraintsSpec.scala index 3b43390c..5ea30389 100644 --- a/src/test/scala/sangria/schema/SchemaConstraintsSpec.scala +++ b/src/test/scala/sangria/schema/SchemaConstraintsSpec.scala @@ -19,14 +19,14 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { InputField("y", FloatType))) val ouputType = ObjectType("Point", fields[Unit, Unit]( - Field("x", FloatType, resolve = _ ⇒ 1.234), - Field("y", FloatType, resolve = _ ⇒ 1.234), - Field("z", FloatType, resolve = _ ⇒ 1.234))) + Field("x", FloatType, resolve = _ => 1.234), + Field("y", FloatType, resolve = _ => 1.234), + Field("z", FloatType, resolve = _ => 1.234))) val queryType = ObjectType("Query", fields[Unit, Unit]( Field("foo", OptionType(ouputType), arguments = Argument("points", ListInputType(inputType)) :: Nil, - resolve = _ ⇒ None))) + resolve = _ => None))) val error = intercept [SchemaValidationException] (Schema(queryType)) @@ -42,18 +42,18 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { val scalarType = ScalarType[String]("Point", coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case s: String => Right(s) + case _ => Left(StringCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ Right(s) - 
case _ ⇒ Left(StringCoercionViolation) + case ast.StringValue(s, _, _, _, _) => Right(s) + case _ => Left(StringCoercionViolation) }) val queryType = ObjectType("Query", fields[Unit, Unit]( Field("foo", OptionType(scalarType), arguments = Argument("points", ListInputType(inputType)) :: Nil, - resolve = _ ⇒ None))) + resolve = _ => None))) val error = intercept [SchemaValidationException] (Schema(queryType)) @@ -69,19 +69,19 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { val scalarType = ScalarType[String]("__Point", coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case s: String => Right(s) + case _ => Left(StringCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case ast.StringValue(s, _, _, _, _) => Right(s) + case _ => Left(StringCoercionViolation) }) val bazType = InterfaceType("__Baz", fields[Unit, Unit]( - Field("id", IntType, resolve = _ ⇒ 1))) + Field("id", IntType, resolve = _ => 1))) val barType = ObjectType("__Bar", interfaces[Unit, Unit](bazType), fields[Unit, Unit]( - Field("foo", OptionType(scalarType),resolve = _ ⇒ None))) + Field("foo", OptionType(scalarType),resolve = _ => None))) val colorType = EnumType("__Color", values = List( EnumValue("RED", value = 1), @@ -89,9 +89,9 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { EnumValue("__BLUE", value = 3))) val queryType = ObjectType("Query", fields[Unit, Unit]( - Field("__foo", OptionType(scalarType),resolve = _ ⇒ None), - Field("bar", OptionType(barType),resolve = _ ⇒ None), - Field("color", OptionType(colorType),resolve = _ ⇒ None))) + Field("__foo", OptionType(scalarType),resolve = _ => None), + Field("bar", OptionType(barType),resolve = _ => None), + Field("color", OptionType(colorType),resolve = _ => None))) val error = intercept [SchemaValidationException] (Schema(queryType, additionalTypes = inputType :: Nil)) @@ 
-115,7 +115,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { EnumValue("null", value = 4))) val queryType = ObjectType("Query", fields[Unit, Unit]( - Field("color", OptionType(colorType), resolve = _ ⇒ None))) + Field("color", OptionType(colorType), resolve = _ => None))) val error = intercept [SchemaValidationException] (Schema(queryType)) @@ -133,7 +133,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { val queryType = ObjectType("Query", fields[Unit, Unit]( Field("foo", OptionType(outType), arguments = Argument("bar", inputType) :: Nil, - resolve = _ ⇒ ()))) + resolve = _ => ()))) val error = intercept [SchemaValidationException] (Schema(queryType)) @@ -149,8 +149,8 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { implicit val barBazType = deriveObjectType[Unit, test.bar.Baz]() val queryType = ObjectType("Query", fields[Unit, Unit]( - Field("fooBaz", OptionType(fooBazType), resolve = _ ⇒ Some(test.foo.Baz(1))), - Field("barBaz", barBazType, resolve = _ ⇒ test.bar.Baz("2", 3.0)) + Field("fooBaz", OptionType(fooBazType), resolve = _ => Some(test.foo.Baz(1))), + Field("barBaz", barBazType, resolve = _ => test.bar.Baz("2", 3.0)) )) val error = intercept [SchemaValidationException] (Schema(queryType)) @@ -164,8 +164,8 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { implicit val barBazType = deriveObjectType[Unit, test.bar.Baz](ObjectTypeName("BazWithNewName")) val queryType = ObjectType("Query", fields[Unit, Unit]( - Field("fooBaz", OptionType(fooBazType), resolve = _ ⇒ Some(test.foo.Baz(1))), - Field("barBaz", barBazType, resolve = _ ⇒ test.bar.Baz("2", 3.0)) + Field("fooBaz", OptionType(fooBazType), resolve = _ => Some(test.foo.Baz(1))), + Field("barBaz", barBazType, resolve = _ => test.bar.Baz("2", 3.0)) )) Schema(queryType) // Should not throw any SchemaValidationExceptions @@ -200,7 +200,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { union BadUnion """, - "Union type 'BadUnion' 
must define one or more member types." → Seq(Pos(6, 9))) + "Union type 'BadUnion' must define one or more member types." -> Seq(Pos(6, 9))) "rejects a Union type with duplicated member type" in invalidSchema( { @@ -230,7 +230,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { ast1 + ast2 }, - "Union type 'BadUnion' can only include type 'TypeA' once." → Seq(Pos(7, 17), Pos(10, 37))) + "Union type 'BadUnion' can only include type 'TypeA' once." -> Seq(Pos(7, 17), Pos(10, 37))) "rejects a Union type with non-Object members types" in invalidSchema( graphql""" @@ -251,7 +251,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { | String | TypeB """, - "Type 'String' is not an object type." → Seq(Pos(16, 13))) + "Type 'String' is not an object type." -> Seq(Pos(16, 13))) } "Type System: Input Objects must have fields" should { @@ -274,7 +274,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { input SomeInputObject """, - "Input type 'SomeInputObject' must define one or more fields." → Seq(Pos(6, 9))) + "Input type 'SomeInputObject' must define one or more fields." -> Seq(Pos(6, 9))) "rejects an Input Object type with incorrectly typed fields" in invalidSchema( graphql""" @@ -294,7 +294,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { goodInputObject: SomeInputObject } """, - "Type 'SomeObject' is not an input type type." → Seq(Pos(13, 22))) + "Type 'SomeObject' is not an input type type." -> Seq(Pos(13, 22))) } "Type System: Enum types must be well defined" should { @@ -306,7 +306,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { enum SomeEnum """, - "Enum type 'SomeEnum' must define one or more values." → Seq(Pos(6, 9))) + "Enum type 'SomeEnum' must define one or more values." 
-> Seq(Pos(6, 9))) "rejects an Enum type with duplicate values" in invalidSchema( graphql""" @@ -319,7 +319,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { SOME_VALUE } """, - "Enum type 'SomeEnum' can include value 'SOME_VALUE' only once." → Seq(Pos(7, 11), Pos(8, 11))) + "Enum type 'SomeEnum' can include value 'SOME_VALUE' only once." -> Seq(Pos(7, 11), Pos(8, 11))) } "Type System: Object fields must have output types" should { @@ -333,7 +333,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "Type 'SomeInputObject' is not an output type type." → Seq(Pos(3, 19))) + "Type 'SomeInputObject' is not an output type type." -> Seq(Pos(3, 19))) } "Type System: Objects can only implement unique interfaces" should { @@ -351,7 +351,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "Type 'SomeInputObject' is not an output type type." → Seq(Pos(10, 35))) + "Type 'SomeInputObject' is not an output type type." -> Seq(Pos(10, 35))) "rejects an Object implementing the same interface twice" in invalidSchema( graphql""" @@ -367,7 +367,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." → Seq(Pos(10, 39), Pos(10, 58))) + "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." -> Seq(Pos(10, 39), Pos(10, 58))) "rejects an Object implementing the same interface twice due to extension" in invalidSchema( graphql""" @@ -385,7 +385,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { extend type AnotherObject implements AnotherInterface """, - "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." → Seq(Pos(10, 39), Pos(14, 46))) + "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." 
-> Seq(Pos(10, 39), Pos(14, 46))) "rejects an Object implementing the extended interface due to missing field (via extension)" in invalidSchema( buildSchema(graphql""" @@ -403,7 +403,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { """).extend(graphql""" extend type AnotherObject implements AnotherInterface """), - "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." → Seq(Pos(10, 39), Pos(2, 46))) + "Object type 'AnotherObject' can implement interface 'AnotherInterface' only once." -> Seq(Pos(10, 39), Pos(2, 46))) } "Type System: Interface extensions should be valid" should { @@ -429,7 +429,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { newField: String } """), - "AnotherInterface.newField expects argument 'test', but AnotherObject.newField does not provide it." → Seq(Pos(3, 20), Pos(7, 11))) + "AnotherInterface.newField expects argument 'test', but AnotherObject.newField does not provide it." -> Seq(Pos(3, 20), Pos(7, 11))) "rejects Objects implementing the extended interface due to mismatching interface type" in invalidSchema( buildSchema(graphql""" @@ -461,7 +461,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { newInterfaceField: MismatchingInterface } """), - "AnotherInterface.newInterfaceField expects type 'NewInterface', but AnotherObject.newInterfaceField provides type 'MismatchingInterface'." → Seq(Pos(15, 11), Pos(3, 11))) + "AnotherInterface.newInterfaceField expects type 'NewInterface', but AnotherObject.newInterfaceField provides type 'MismatchingInterface'." -> Seq(Pos(15, 11), Pos(3, 11))) } "Type System: Interface fields must have output types" should { @@ -479,7 +479,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { foo: String } """, - "Type 'SomeInputObject' is not an output type type." → Seq(Pos(7, 18))) + "Type 'SomeInputObject' is not an output type type." 
-> Seq(Pos(7, 18))) } "Type System: Field arguments must have input types" should { @@ -493,7 +493,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { foo: String } """, - "Type 'SomeObject' is not an input type type." → Seq(Pos(3, 21))) + "Type 'SomeObject' is not an input type type." -> Seq(Pos(3, 21))) } "Type System: Input Object fields must have input types" should { @@ -511,7 +511,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { bar: String } """, - "Type 'SomeObject' is not an input type type." → Seq(Pos(7, 16))) + "Type 'SomeObject' is not an input type type." -> Seq(Pos(7, 16))) } "Objects must adhere to Interface they implement" should { @@ -575,7 +575,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field(input: String): Int } """, - "AnotherInterface.field expects type 'String', but AnotherObject.field provides type 'Int'." → Seq(Pos(11, 11), Pos(7, 11))) + "AnotherInterface.field expects type 'String', but AnotherObject.field provides type 'Int'." -> Seq(Pos(11, 11), Pos(7, 11))) "rejects an Object with a differently typed Interface field" in invalidSchema( graphql""" @@ -594,7 +594,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: B } """, - "AnotherInterface.field expects type 'A', but AnotherObject.field provides type 'B'." → Seq(Pos(14, 11), Pos(10, 11))) + "AnotherInterface.field expects type 'A', but AnotherObject.field provides type 'B'." -> Seq(Pos(14, 11), Pos(10, 11))) "accepts an Object with a subtyped Interface field (interface)" in validSchema( graphql""" @@ -646,7 +646,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "AnotherInterface.field expects argument 'input', but AnotherObject.field does not provide it." → Seq(Pos(7, 17), Pos(11, 11))) + "AnotherInterface.field expects argument 'input', but AnotherObject.field does not provide it." 
-> Seq(Pos(7, 17), Pos(11, 11))) "rejects an Object with an incorrectly typed Interface argument" in invalidSchema( graphql""" @@ -662,7 +662,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field(input: Int): String } """, - "AnotherInterface.field(input) expects type 'String', but AnotherObject.field(input) provides type 'Int'." → Seq(Pos(7, 17), Pos(11, 17))) + "AnotherInterface.field(input) expects type 'String', but AnotherObject.field(input) provides type 'Int'." -> Seq(Pos(7, 17), Pos(11, 17))) "rejects an Object with both an incorrectly typed field and argument" in invalidSchema( graphql""" @@ -678,7 +678,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field(input: Int): Int } """, - "AnotherInterface.field expects type 'String', but AnotherObject.field provides type 'Int'." → Seq(Pos(11, 11), Pos(7, 11))) + "AnotherInterface.field expects type 'String', but AnotherObject.field provides type 'Int'." -> Seq(Pos(11, 11), Pos(7, 11))) "rejects an Object which implements an Interface field along with additional required arguments" in invalidSchema( graphql""" @@ -694,7 +694,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field(input: String, anotherInput: String!): String } """, - "AnotherObject.field(anotherInput) is of required type 'String!', but is not also provided by the interface AnotherInterface.field." → Seq(Pos(11, 32), Pos(7, 11))) + "AnotherObject.field(anotherInput) is of required type 'String!', but is not also provided by the interface AnotherInterface.field." -> Seq(Pos(11, 32), Pos(7, 11))) "accepts an Object with an equivalently wrapped Interface field type" in validSchema( graphql""" @@ -725,7 +725,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "AnotherInterface.field expects type '[String]', but AnotherObject.field provides type 'String'." 
→ Seq(Pos(11, 11), Pos(7, 11))) + "AnotherInterface.field expects type '[String]', but AnotherObject.field provides type 'String'." -> Seq(Pos(11, 11), Pos(7, 11))) "rejects an Object with a list Interface field non-list type" in invalidSchema( graphql""" @@ -741,7 +741,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: [String] } """, - "AnotherInterface.field expects type 'String', but AnotherObject.field provides type '[String]'." → Seq(Pos(11, 11), Pos(7, 11))) + "AnotherInterface.field expects type 'String', but AnotherObject.field provides type '[String]'." -> Seq(Pos(11, 11), Pos(7, 11))) "accepts an Object with a subset non-null Interface field type" in validSchema( graphql""" @@ -772,7 +772,7 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { field: String } """, - "AnotherInterface.field expects type 'String!', but AnotherObject.field provides type 'String'." → Seq(Pos(11, 11), Pos(7, 11))) + "AnotherInterface.field expects type 'String!', but AnotherObject.field provides type 'String'." 
-> Seq(Pos(11, 11), Pos(7, 11))) } def buildSchema(document: ast.Document) = { @@ -784,16 +784,16 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { def invalidSchema(document: ast.Document, expected: (String, Seq[Pos])*): Unit = invalidSchema(buildSchema(document), expected: _*) - def invalidSchema(schema: ⇒ Schema[_, _], expected: (String, Seq[Pos])*): Unit = { + def invalidSchema(schema: => Schema[_, _], expected: (String, Seq[Pos])*): Unit = { (Try(schema): @unchecked) match { - case Success(_) ⇒ fail("Schema was built successfully") - case Failure(e: WithViolations) ⇒ + case Success(_) => fail("Schema was built successfully") + case Failure(e: WithViolations) => val violationsStr = "Actual violations:\n\n" + e.violations.zipWithIndex.map { - case (v, idx) ⇒ + case (v, idx) => val helperStr = v match { - case n: AstNodeLocation ⇒ " \"" + n.simpleErrorMessage + "\" → Seq(" + n.locations.map(l ⇒ s"Pos(${l.line}, ${l.column})").mkString(", ") + ")" - case n ⇒ n.errorMessage + case n: AstNodeLocation => " \"" + n.simpleErrorMessage + "\" -> Seq(" + n.locations.map(l => s"Pos(${l.line}, ${l.column})").mkString(", ") + ")" + case n => n.errorMessage } s"(${idx + 1}) " + v.errorMessage + "\n\n" + helperStr @@ -802,14 +802,14 @@ class SchemaConstraintsSpec extends WordSpec with Matchers { withClue(violationsStr) { e.violations should have size expected.size - expected foreach { case (expected, pos) ⇒ - e.violations exists { error ⇒ + expected foreach { case (expected, pos) => + e.violations exists { error => val message = error.errorMessage message.contains(expected) && { error match { - case n: AstNodeLocation ⇒ n.locations.map(p ⇒ Pos(p.line, p.column)) == pos - case _ ⇒ false + case n: AstNodeLocation => n.locations.map(p => Pos(p.line, p.column)) == pos + case _ => false } } } should be(true) diff --git a/src/test/scala/sangria/schema/SchemaDefinitionSpec.scala b/src/test/scala/sangria/schema/SchemaDefinitionSpec.scala index 5634ff08..f7350ff4 100644 
--- a/src/test/scala/sangria/schema/SchemaDefinitionSpec.scala +++ b/src/test/scala/sangria/schema/SchemaDefinitionSpec.scala @@ -17,26 +17,26 @@ class SchemaDefinitionSpec extends WordSpec with Matchers with FutureResultSuppo val CustomScalarType = ScalarType[String]("CustomScalar", coerceOutput = valueOutput, coerceUserInput = { - case s: String ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case s: String => Right(s) + case _ => Left(StringCoercionViolation) }, coerceInput = { - case ast.StringValue(s, _, _, _, _) ⇒ Right(s) - case _ ⇒ Left(StringCoercionViolation) + case ast.StringValue(s, _, _, _, _) => Right(s) + case _ => Left(StringCoercionViolation) }) val NamedType = InterfaceType("Named", fields[Unit, Unit]( - Field("name", OptionType(StringType), resolve = _ ⇒ None), - Field("custom", OptionType(CustomScalarType), resolve = _ ⇒ None))) + Field("name", OptionType(StringType), resolve = _ => None), + Field("custom", OptionType(CustomScalarType), resolve = _ => None))) val DogType = ObjectType("Dog", interfaces[Unit, Unit](NamedType), fields[Unit, Unit]( - Field("barks", OptionType(BooleanType), resolve = _ ⇒ None))) + Field("barks", OptionType(BooleanType), resolve = _ => None))) val CatType = ObjectType("Cat", interfaces[Unit, Unit](NamedType), fields[Unit, Unit]( - Field("meows", OptionType(BooleanType), resolve = _ ⇒ None))) + Field("meows", OptionType(BooleanType), resolve = _ => None))) val queryType = ObjectType("Query", fields[Unit, Unit]( - Field("foo", OptionType(StringType), resolve = _ ⇒ None))) + Field("foo", OptionType(StringType), resolve = _ => None))) val schema = Schema(queryType, additionalTypes = DogType :: CatType :: Nil) @@ -46,7 +46,7 @@ class SchemaDefinitionSpec extends WordSpec with Matchers with FutureResultSuppo schema.types.keySet should be (fromIntro) - List(schema.types.keySet, fromIntro) foreach { typeNames ⇒ + List(schema.types.keySet, fromIntro) foreach { typeNames => typeNames should ( contain("Named") and 
contain("Dog") and diff --git a/src/test/scala/sangria/schema/SchemaExtensionSpec.scala b/src/test/scala/sangria/schema/SchemaExtensionSpec.scala index 809fce77..8ce898d6 100644 --- a/src/test/scala/sangria/schema/SchemaExtensionSpec.scala +++ b/src/test/scala/sangria/schema/SchemaExtensionSpec.scala @@ -20,25 +20,25 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor case class Foo(name: Option[String], some: Option[SomeInterface], tree: List[Option[Foo]]) extends SomeInterface case class Bar(name: Option[String], some: Option[SomeInterface], foo: Option[Foo]) extends SomeInterface - val SomeInterfaceType: InterfaceType[Unit, SomeInterface] = InterfaceType("SomeInterface", () ⇒ fields( + val SomeInterfaceType: InterfaceType[Unit, SomeInterface] = InterfaceType("SomeInterface", () => fields( Field("name", OptionType(StringType), resolve = _.value.name), Field("some", OptionType(SomeInterfaceType), resolve = _.value.some) )) - val FooType: ObjectType[Unit, Foo] = ObjectType("Foo", interfaces = interfaces(SomeInterfaceType), () ⇒ fields( + val FooType: ObjectType[Unit, Foo] = ObjectType("Foo", interfaces = interfaces(SomeInterfaceType), () => fields( Field("name", OptionType(StringType), resolve = _.value.name), Field("some", OptionType(SomeInterfaceType), resolve = _.value.some), Field("tree", ListType(OptionType(FooType)), resolve = _.value.tree) )) - val BarType: ObjectType[Unit, Bar] = ObjectType("Bar", interfaces = interfaces(SomeInterfaceType), () ⇒ fields( + val BarType: ObjectType[Unit, Bar] = ObjectType("Bar", interfaces = interfaces(SomeInterfaceType), () => fields( Field("name", OptionType(StringType), resolve = _.value.name), Field("some", OptionType(SomeInterfaceType), resolve = _.value.some), Field("foo", OptionType(FooType), resolve = _.value.foo) )) - val BizType = ObjectType("Biz", () ⇒ fields[Unit, Unit]( - Field("fizz", OptionType(StringType), resolve = _ ⇒ None) + val BizType = ObjectType("Biz", () => fields[Unit, 
Unit]( + Field("fizz", OptionType(StringType), resolve = _ => None) )) val SomeUnionType = UnionType("SomeUnion", types = FooType :: BizType :: Nil) @@ -50,12 +50,12 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor val schema = Schema( query = ObjectType("Query", fields[Unit, Unit]( - Field("foo", OptionType(FooType), resolve = _ ⇒ Some(Foo(Some("foo"), None, Nil))), - Field("someUnion", OptionType(SomeUnionType), resolve = _ ⇒ None), - Field("someEnum", OptionType(SomeEnumType), resolve = _ ⇒ None), + Field("foo", OptionType(FooType), resolve = _ => Some(Foo(Some("foo"), None, Nil))), + Field("someUnion", OptionType(SomeUnionType), resolve = _ => None), + Field("someEnum", OptionType(SomeEnumType), resolve = _ => None), Field("someInterface", OptionType(SomeInterfaceType), arguments = Argument("id", IDType) :: Nil, - resolve = _ ⇒ Some(Foo(Some("a"), Some(Bar(Some("b"), None, Some(Foo(Some("c"), None, Nil)))), List(None, Some(Foo(Some("d"), None, Nil)))))) + resolve = _ => Some(Foo(Some("a"), Some(Bar(Some("b"), None, Some(Foo(Some("c"), None, Nil)))), List(None, Some(Foo(Some("d"), None, Nil)))))) )), additionalTypes = BarType :: Nil) @@ -96,8 +96,8 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor schema = schema.extend(ast), data = (), query = "{ newField }", - expectedData = Map("newField" → null), - expectedErrorStrings = List(DefaultIntrospectionSchemaBuilder.MaterializedSchemaErrorMessage → List(Pos(1, 3)))) + expectedData = Map("newField" -> null), + expectedErrorStrings = List(DefaultIntrospectionSchemaBuilder.MaterializedSchemaErrorMessage -> List(Pos(1, 3)))) } "extends objects by adding new fields" in { @@ -270,16 +270,16 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor "extends objects by adding new fields with not yet used types" in { val ProductType = InterfaceType("Product", fields[Unit, Unit]( - Field("name", StringType, resolve = _ ⇒ "some 
name") + Field("name", StringType, resolve = _ => "some name") )) val MagicPotionType = ObjectType("MagicPotion", interfaces[Unit, Unit](ProductType), fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1) + Field("size", IntType, resolve = _ => 1) )) val schemaWithPotion = Schema( query = ObjectType("Query", fields[Unit, Unit]( - Field("foo", OptionType(FooType), resolve = _ ⇒ Some(Foo(Some("foo"), None, Nil))))), + Field("foo", OptionType(FooType), resolve = _ => Some(Foo(Some("foo"), None, Nil))))), additionalTypes = BarType :: MagicPotionType :: ProductType :: Nil) val ast = @@ -688,11 +688,11 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor "may extend mutations and subscriptions" in { val mutationSchema = Schema( query = ObjectType("Query", fields[Unit, Unit]( - Field("queryField", StringType, resolve = _ ⇒ ""))), + Field("queryField", StringType, resolve = _ => ""))), mutation = Some(ObjectType("Mutation", fields[Unit, Unit]( - Field("mutationField", StringType, resolve = _ ⇒ "")))), + Field("mutationField", StringType, resolve = _ => "")))), subscription = Some(ObjectType("Subscription", fields[Unit, Unit]( - Field("subscriptionField", StringType, resolve = _ ⇒ ""))))) + Field("subscriptionField", StringType, resolve = _ => ""))))) val ast = graphql""" @@ -865,52 +865,52 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor definition: ast.FieldDefinition, mat: AstSchemaMaterializer[Unit]) = if (definition.name == "animal1") - _ ⇒ Map("type" → "Cat", "name" → "foo", "age" → Some(10)) + _ => Map("type" -> "Cat", "name" -> "foo", "age" -> Some(10)) else if (definition.name == "animal2") - _ ⇒ Map("type" → "Dog", "name" → "bar", "nickname" → Some("baz")) + _ => Map("type" -> "Dog", "name" -> "bar", "nickname" -> Some("baz")) else if (definition.name == "special") - _ ⇒ Map("name" → "Fooo", "some" → None, "custom" → 123) + _ => Map("name" -> "Fooo", "some" -> None, "custom" -> 123) else 
_.value.asInstanceOf[Map[String, Any]](definition.name) override def objectTypeInstanceCheck(origin: MatOrigin, definition: ObjectTypeDefinition, extensions: List[ast.ObjectTypeExtensionDefinition]) = - Some((value, clazz) ⇒ value match { - case v: Map[_, _] if definition.name == "Hello" ⇒ true - case v : Map[String, _] @unchecked if v contains "type" ⇒ value.asInstanceOf[Map[String, Any]]("type") == definition.name - case _ ⇒ false + Some((value, clazz) => value match { + case v: Map[_, _] if definition.name == "Hello" => true + case v : Map[String, _] @unchecked if v contains "type" => value.asInstanceOf[Map[String, Any]]("type") == definition.name + case _ => false }) override def extendedObjectTypeInstanceCheck(origin: MatOrigin, tpe: ObjectType[Unit, _], extensions: List[ObjectTypeExtensionDefinition]) = - Some((value, clazz) ⇒ value match { - case v: Map[_, _] if tpe.name == "Hello" ⇒ true - case v if clazz.isAssignableFrom(v.getClass) ⇒ true - case _ ⇒ false + Some((value, clazz) => value match { + case v: Map[_, _] if tpe.name == "Hello" => true + case v if clazz.isAssignableFrom(v.getClass) => true + case _ => false }) override def scalarCoerceUserInput(definition: ast.ScalarTypeDefinition) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case i: Int ⇒ Right(i) - case i: BigInt ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => definition.name match { + case "Custom" => value match { + case i: Int => Right(i) + case i: BigInt => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceInput(definition: ast.ScalarTypeDefinition) = - value ⇒ definition.name match { - case "Custom" ⇒ value match { - case ast.IntValue(i, _, _) ⇒ Right(i) - case ast.BigIntValue(i, _, _) ⇒ Right(i.intValue) - case _ ⇒ Left(IntCoercionViolation) + value => 
definition.name match { + case "Custom" => value match { + case ast.IntValue(i, _, _) => Right(i) + case ast.BigIntValue(i, _, _) => Right(i.intValue) + case _ => Left(IntCoercionViolation) } - case _ ⇒ Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) + case _ => Left(DefaultIntrospectionSchemaBuilder.MaterializedSchemaViolation) } override def scalarCoerceOutput(definition: ast.ScalarTypeDefinition) = - (coerced, _) ⇒ definition.name match { - case "Custom" ⇒ ast.IntValue(coerced.asInstanceOf[Int]) - case _ ⇒ throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException + (coerced, _) => definition.name match { + case "Custom" => ast.IntValue(coerced.asInstanceOf[Int]) + case _ => throw DefaultIntrospectionSchemaBuilder.MaterializedSchemaException } } @@ -945,20 +945,20 @@ class SchemaExtensionSpec extends WordSpec with Matchers with FutureResultSuppor } } """, - Map("data" → + Map("data" -> Map( - "foo" → Map("name" → "foo"), - "someInterface" → Map( - "name" → "a", - "some" → Map("name" → "b"), - "tree" → Vector(null, Map("name" → "d")), - "animal1" → Map("__typename" → "Cat", "name" → "foo"), - "animal2" → Map("__typename" → "Dog", "name" → "bar")), - "special" → Map( - "__typename" → "Hello", - "name" → "Fooo", - "some" → null, - "custom" → 123)))) + "foo" -> Map("name" -> "foo"), + "someInterface" -> Map( + "name" -> "a", + "some" -> Map("name" -> "b"), + "tree" -> Vector(null, Map("name" -> "d")), + "animal1" -> Map("__typename" -> "Cat", "name" -> "foo"), + "animal2" -> Map("__typename" -> "Dog", "name" -> "bar")), + "special" -> Map( + "__typename" -> "Hello", + "name" -> "Fooo", + "some" -> null, + "custom" -> 123)))) } } } diff --git a/src/test/scala/sangria/schema/TypeFieldConstraintsSpec.scala b/src/test/scala/sangria/schema/TypeFieldConstraintsSpec.scala index 2bf779b4..b9fc3f9c 100644 --- a/src/test/scala/sangria/schema/TypeFieldConstraintsSpec.scala +++ b/src/test/scala/sangria/schema/TypeFieldConstraintsSpec.scala @@ -10,24 
+10,24 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "ObjectType" should { "allow unique fields" in { ObjectType("Test", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("c", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("c", StringType, resolve = _ => "foo") )) - ObjectType("Test", () ⇒ fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("c", StringType, resolve = _ ⇒ "foo") + ObjectType("Test", () => fields[Unit, Unit]( + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("c", StringType, resolve = _ => "foo") )).fields } "disallow non-unique fields" in { val e = intercept [SchemaValidationException] { Schema(ObjectType("Test", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("a", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("a", StringType, resolve = _ => "foo") ))) } @@ -37,7 +37,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "disallow invalid names" in { an [SchemaValidationException] should be thrownBy { Schema(ObjectType("Test-object", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo") ))) } } @@ -46,28 +46,28 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "InterfaceType" should { "allow unique fields" in { InterfaceType("Test", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("c", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("c", 
StringType, resolve = _ => "foo") )) - InterfaceType("Test", () ⇒ fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("c", StringType, resolve = _ ⇒ "foo") + InterfaceType("Test", () => fields[Unit, Unit]( + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("c", StringType, resolve = _ => "foo") )).fields } "disallow non-unique fields" in { an [SchemaValidationException] should be thrownBy { val TestType = InterfaceType("Test", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo"), - Field("b", StringType, resolve = _ ⇒ "foo"), - Field("a", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo"), + Field("b", StringType, resolve = _ => "foo"), + Field("a", StringType, resolve = _ => "foo") )) Schema(ObjectType("Foo", interfaces[Unit, Unit](TestType), fields[Unit, Unit]( - Field("d", StringType, resolve = _ ⇒ "foo") + Field("d", StringType, resolve = _ => "foo") ))) } } @@ -75,11 +75,11 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "disallow invalid names" in { an [SchemaValidationException] should be thrownBy { val TestType = InterfaceType("Test-int", fields[Unit, Unit]( - Field("a", StringType, resolve = _ ⇒ "foo") + Field("a", StringType, resolve = _ => "foo") )) Schema(ObjectType("Foo", interfaces[Unit, Unit](TestType), fields[Unit, Unit]( - Field("d", StringType, resolve = _ ⇒ "foo") + Field("d", StringType, resolve = _ => "foo") ))) } } @@ -93,7 +93,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { InputField("c", StringType) )) - InputObjectType("Test", () ⇒ List( + InputObjectType("Test", () => List( InputField("a", StringType), InputField("b", StringType), InputField("c", StringType) @@ -111,7 +111,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { Schema(ObjectType("Foo", fields[Unit, Unit]( Field("d", StringType, arguments = Argument("test", 
TestType) :: Nil, - resolve = _ ⇒ "foo") + resolve = _ => "foo") ))) } } @@ -127,7 +127,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { Schema(ObjectType("Foo", fields[Unit, Unit]( Field("d", StringType, arguments = Argument("test", TestType) :: Nil, - resolve = _ ⇒ "foo") + resolve = _ => "foo") ))) } @@ -139,7 +139,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { Schema(ObjectType("Foo", fields[Unit, Unit]( Field("d", StringType, arguments = Argument("test", TestType) :: Nil, - resolve = _ ⇒ "foo") + resolve = _ => "foo") ))) } } @@ -159,7 +159,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "Schema" should { "provide a helpful error message if circular references are detected" in { val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("a", AType, resolve = _ ⇒ A(Some(B(A(None, "bar"), 1)), "foo")) + Field("a", AType, resolve = _ => A(Some(B(A(None, "bar"), 1)), "foo")) )) val error = intercept[IllegalStateException](Schema(QueryType)) @@ -169,28 +169,28 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "ensure that implemented fields have correct type" in { val FruitType = InterfaceType("Fruit", fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1) + Field("size", IntType, resolve = _ => 1) )) val SomeOtherInterfaceType = InterfaceType("SomeOtherInterfaceType", fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1) + Field("size", IntType, resolve = _ => 1) )) val AppleType = ObjectType("Apple", interfaces[Unit, Unit](FruitType), fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), - Field("color", StringType, resolve = _ ⇒ "red") + Field("size", IntType, resolve = _ => 1), + Field("color", StringType, resolve = _ => "red") )) val BasketType = InterfaceType("Basket", fields[Unit, Unit]( - Field("fruit", FruitType, resolve = _ ⇒ ()) + Field("fruit", FruitType, resolve = _ => ()) )) val AppleBasketType = ObjectType("AppleBasket", interfaces[Unit, 
Unit](BasketType), fields[Unit, Unit]( - Field("fruit", SomeOtherInterfaceType, resolve = _ ⇒ ()) + Field("fruit", SomeOtherInterfaceType, resolve = _ => ()) )) val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("basket", BasketType, resolve = _ ⇒ ()) + Field("basket", BasketType, resolve = _ => ()) )) @@ -203,21 +203,21 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "ensure that all interface field arguments are present in the implementation" in { val FruitType = InterfaceType("Fruit", fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("parts", IntType) :: Nil, resolve = _.args.arg[Int]("parts")) )) val AppleType = ObjectType("Apple", interfaces[Unit, Unit](FruitType), fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("pieces", IntType) :: Nil, resolve = _.args.arg[Int]("pieces")) )) val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("fruit", FruitType, resolve = _ ⇒ ()) + Field("fruit", FruitType, resolve = _ => ()) )) val error = intercept[SchemaValidationException]( @@ -229,21 +229,21 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "ensure that all interface field argument types are the same in the implementation" in { val FruitType = InterfaceType("Fruit", fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("parts", IntType) :: Nil, resolve = _.args.arg[Int]("parts")) )) val AppleType = ObjectType("Apple", interfaces[Unit, Unit](FruitType), fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("parts", StringType) :: Nil, resolve = _.args.arg[String]("parts").toInt) )) val QueryType = 
ObjectType("Query", fields[Unit, Unit]( - Field("fruit", FruitType, resolve = _ ⇒ ()) + Field("fruit", FruitType, resolve = _ => ()) )) val error = intercept[SchemaValidationException]( @@ -255,21 +255,21 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { "ensure that all implementation extra field arguments are optional" in { val FruitType = InterfaceType("Fruit", fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("parts", IntType) :: Nil, resolve = _.args.arg[Int]("parts")) )) val AppleType = ObjectType("Apple", interfaces[Unit, Unit](FruitType), fields[Unit, Unit]( - Field("size", IntType, resolve = _ ⇒ 1), + Field("size", IntType, resolve = _ => 1), Field("slice", IntType, arguments = Argument("parts", IntType) :: Argument("careful", BooleanType) :: Nil, resolve = _.args.arg[Int]("parts")) )) val QueryType = ObjectType("Query", fields[Unit, Unit]( - Field("fruit", FruitType, resolve = _ ⇒ ()) + Field("fruit", FruitType, resolve = _ => ()) )) val error = intercept[SchemaValidationException]( @@ -334,11 +334,11 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { } } """, - Map("data" → Map( - "basket" → Map( - "fruit" → Map( - "size" → 11, - "slice" → 5))))) + Map("data" -> Map( + "basket" -> Map( + "fruit" -> Map( + "size" -> 11, + "slice" -> 5))))) checkContainsErrors( schema, @@ -353,7 +353,7 @@ class TypeFieldConstraintsSpec extends WordSpec with Matchers { } """, null, - List("Cannot query field 'color' on type 'Fruit'. Did you mean to use an inline fragment on 'Apple'?" → List(Pos(5, 15)))) + List("Cannot query field 'color' on type 'Fruit'. Did you mean to use an inline fragment on 'Apple'?" 
-> List(Pos(5, 15)))) } } diff --git a/src/test/scala/sangria/starWars/StarWarsIntrospectionSpec.scala b/src/test/scala/sangria/starWars/StarWarsIntrospectionSpec.scala index e171ef1d..e0080ebf 100644 --- a/src/test/scala/sangria/starWars/StarWarsIntrospectionSpec.scala +++ b/src/test/scala/sangria/starWars/StarWarsIntrospectionSpec.scala @@ -25,24 +25,24 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__schema" → Map( - "types" → List( - Map("name" → "Character"), - Map("name" → "Droid"), - Map("name" → "Episode"), - Map("name" → "Human"), - Map("name" → "Query"), - Map("name" → "__Directive"), - Map("name" → "__DirectiveLocation"), - Map("name" → "__EnumValue"), - Map("name" → "__Field"), - Map("name" → "__InputValue"), - Map("name" → "__Schema"), - Map("name" → "__Type"), - Map("name" → "__TypeKind"), - Map("name" → "Boolean"), - Map("name" → "String") + "data" -> Map( + "__schema" -> Map( + "types" -> List( + Map("name" -> "Character"), + Map("name" -> "Droid"), + Map("name" -> "Episode"), + Map("name" -> "Human"), + Map("name" -> "Query"), + Map("name" -> "__Directive"), + Map("name" -> "__DirectiveLocation"), + Map("name" -> "__EnumValue"), + Map("name" -> "__Field"), + Map("name" -> "__InputValue"), + Map("name" -> "__Schema"), + Map("name" -> "__Type"), + Map("name" -> "__TypeKind"), + Map("name" -> "Boolean"), + Map("name" -> "String") ) ) ) @@ -62,10 +62,10 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__schema" → Map( - "queryType" → Map( - "name" → "Query" + "data" -> Map( + "__schema" -> Map( + "queryType" -> Map( + "name" -> "Query" ) ) ) @@ -83,9 +83,9 @@ class StarWarsIntrospectionSpec extends 
WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Droid" + "data" -> Map( + "__type" -> Map( + "name" -> "Droid" ) ) )) @@ -103,10 +103,10 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Droid", - "kind" → "OBJECT" + "data" -> Map( + "__type" -> Map( + "name" -> "Droid", + "kind" -> "OBJECT" ) ) )) @@ -124,10 +124,10 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Character", - "kind" → "INTERFACE" + "data" -> Map( + "__type" -> Map( + "name" -> "Character", + "kind" -> "INTERFACE" ) ) )) @@ -151,50 +151,50 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Droid", - "fields" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "Droid", + "fields" -> List( Map( - "name" → "id", - "type" → Map( - "name" → null, - "kind" → "NON_NULL" + "name" -> "id", + "type" -> Map( + "name" -> null, + "kind" -> "NON_NULL" ) ), Map( - "name" → "name", - "type" → Map( - "name" → "String", - "kind" → "SCALAR" + "name" -> "name", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR" ) ), Map( - "name" → "friends", - "type" → Map( - "name" → null, - "kind" → "LIST" + "name" -> "friends", + "type" -> Map( + "name" -> null, + "kind" -> "LIST" ) ), Map( - "name" → "appearsIn", - "type" → Map( - "name" → null, - "kind" → "LIST" 
+ "name" -> "appearsIn", + "type" -> Map( + "name" -> null, + "kind" -> "LIST" ) ), Map( - "name" → "primaryFunction", - "type" → Map( - "name" → "String", - "kind" → "SCALAR" + "name" -> "primaryFunction", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR" ) ), Map( - "name" → "secretBackstory", - "type" → Map( - "name" → "String", - "kind" → "SCALAR" + "name" -> "secretBackstory", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR" ) ) ) @@ -225,65 +225,65 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Droid", - "fields" → List( + "data" -> Map( + "__type" -> Map( + "name" -> "Droid", + "fields" -> List( Map( - "name" → "id", - "type" → Map( - "name" → null, - "kind" → "NON_NULL", - "ofType" → Map( - "name" → "String", - "kind" → "SCALAR" + "name" -> "id", + "type" -> Map( + "name" -> null, + "kind" -> "NON_NULL", + "ofType" -> Map( + "name" -> "String", + "kind" -> "SCALAR" ) ) ), Map( - "name" → "name", - "type" → Map( - "name" → "String", - "kind" → "SCALAR", - "ofType" → null + "name" -> "name", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR", + "ofType" -> null ) ), Map( - "name" → "friends", - "type" → Map( - "name" → null, - "kind" → "LIST", - "ofType" → Map( - "name" → "Character", - "kind" → "INTERFACE" + "name" -> "friends", + "type" -> Map( + "name" -> null, + "kind" -> "LIST", + "ofType" -> Map( + "name" -> "Character", + "kind" -> "INTERFACE" ) ) ), Map( - "name" → "appearsIn", - "type" → Map( - "name" → null, - "kind" → "LIST", - "ofType" → Map( - "name" → "Episode", - "kind" → "ENUM" + "name" -> "appearsIn", + "type" -> Map( + "name" -> null, + "kind" -> "LIST", + "ofType" -> Map( + "name" -> "Episode", + "kind" -> "ENUM" ) ) ), Map( - "name" → "primaryFunction", - "type" → Map( - "name" → "String", - "kind" → 
"SCALAR", - "ofType" → null + "name" -> "primaryFunction", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR", + "ofType" -> null ) ), Map( - "name" → "secretBackstory", - "type" → Map( - "name" → "String", - "kind" → "SCALAR", - "ofType" → null + "name" -> "secretBackstory", + "type" -> Map( + "name" -> "String", + "kind" -> "SCALAR", + "ofType" -> null ) ) ) @@ -321,61 +321,61 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__schema" → Map( - "queryType" → Map( - "fields" → List( + "data" -> Map( + "__schema" -> Map( + "queryType" -> Map( + "fields" -> List( Map( - "name" → "hero", - "args" → List( + "name" -> "hero", + "args" -> List( Map( - "defaultValue" → null, - "description" → ( + "defaultValue" -> null, + "description" -> ( "If omitted, returns the hero of the whole " + "saga. If provided, returns the hero of " + "that particular episode."), - "name" → "episode", - "type" → Map( - "kind" → "ENUM", - "name" → "Episode", - "ofType" → null + "name" -> "episode", + "type" -> Map( + "kind" -> "ENUM", + "name" -> "Episode", + "ofType" -> null ) ) ) ), Map( - "name" → "human", - "args" → List( + "name" -> "human", + "args" -> List( Map( - "name" → "id", - "description" → "id of the character", - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - "name" → "String" + "name" -> "id", + "description" -> "id of the character", + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String" ) ), - "defaultValue" → null + "defaultValue" -> null ) ) ), Map( - "name" → "droid", - "args" → List( + "name" -> "droid", + "args" -> List( Map( - "name" → "id", - "description" → "id of the character", - "type" → Map( - "kind" → "NON_NULL", - "name" → null, - "ofType" → Map( - "kind" → "SCALAR", - 
"name" → "String" + "name" -> "id", + "description" -> "id of the character", + "type" -> Map( + "kind" -> "NON_NULL", + "name" -> null, + "ofType" -> Map( + "kind" -> "SCALAR", + "name" -> "String" ) ), - "defaultValue" → null + "defaultValue" -> null ) ) ) @@ -398,13 +398,13 @@ class StarWarsIntrospectionSpec extends WordSpec with Matchers with FutureResult Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "__type" → Map( - "name" → "Droid", - "description" → "A mechanical creature in the Star Wars universe." + "data" -> Map( + "__type" -> Map( + "name" -> "Droid", + "description" -> "A mechanical creature in the Star Wars universe." ) ) )) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/starWars/StarWarsQuerySpec.scala b/src/test/scala/sangria/starWars/StarWarsQuerySpec.scala index 08947773..ab49b70f 100644 --- a/src/test/scala/sangria/starWars/StarWarsQuerySpec.scala +++ b/src/test/scala/sangria/starWars/StarWarsQuerySpec.scala @@ -26,9 +26,9 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "name" → "R2-D2")))) + "data" -> Map( + "hero" -> Map( + "name" -> "R2-D2")))) } "Allows us to query for the ID and friends of R2-D2" in { @@ -46,14 +46,14 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "id" → "2001", - "name" → "R2-D2", - "friends" → List( - Map("name" → "Luke Skywalker"), - Map("name" → "Han Solo"), - Map("name" → "Leia Organa") + "data" -> Map( + "hero" -> Map( + "id" -> "2001", + "name" -> "R2-D2", + "friends" -> List( + Map("name" -> "Luke Skywalker"), + Map("name" 
-> "Han Solo"), + Map("name" -> "Leia Organa") ))))) } @@ -74,21 +74,21 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport "HeroOnlyQuery", fields[CharacterRepo, Unit]( Field("hero", TestSchema.Character, arguments = TestSchema.EpisodeArg :: Nil, - resolve = (ctx) ⇒ ctx.ctx.getHero(ctx.arg(TestSchema.EpisodeArg))) + resolve = (ctx) => ctx.ctx.getHero(ctx.arg(TestSchema.EpisodeArg))) )) val heroOnlySchema = Schema(HeroOnlyQuery, additionalTypes = TestSchema.Human :: TestSchema.Droid :: Nil) Executor.execute(heroOnlySchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "id" → "2001", - "name" → "R2-D2", - "friends" → List( - Map("name" → "Luke Skywalker"), - Map("name" → "Han Solo"), - Map("name" → "Leia Organa") + "data" -> Map( + "hero" -> Map( + "id" -> "2001", + "name" -> "R2-D2", + "friends" -> List( + Map("name" -> "Luke Skywalker"), + Map("name" -> "Han Solo"), + Map("name" -> "Leia Organa") ))))) } } @@ -112,37 +112,37 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "name" → "R2-D2", - "friends" → List( + "data" -> Map( + "hero" -> Map( + "name" -> "R2-D2", + "friends" -> List( Map( - "name" → "Luke Skywalker", - "appearsIn" → List("NEWHOPE", "EMPIRE", "JEDI"), - "friends" → List( - Map("name" → "Han Solo"), - Map("name" → "Leia Organa"), - Map("name" → "C-3PO"), - Map("name" → "R2-D2") + "name" -> "Luke Skywalker", + "appearsIn" -> List("NEWHOPE", "EMPIRE", "JEDI"), + "friends" -> List( + Map("name" -> "Han Solo"), + Map("name" -> "Leia Organa"), + Map("name" -> "C-3PO"), + Map("name" -> "R2-D2") ) ), Map( - "name" → "Han Solo", - "appearsIn" → List("NEWHOPE", "EMPIRE", "JEDI"), - "friends" → List( - Map("name" → "Luke Skywalker"), - Map("name" → "Leia Organa"), - 
Map("name" → "R2-D2") + "name" -> "Han Solo", + "appearsIn" -> List("NEWHOPE", "EMPIRE", "JEDI"), + "friends" -> List( + Map("name" -> "Luke Skywalker"), + Map("name" -> "Leia Organa"), + Map("name" -> "R2-D2") ) ), Map( - "name" → "Leia Organa", - "appearsIn" → List("NEWHOPE", "EMPIRE", "JEDI"), - "friends" → List( - Map("name" → "Luke Skywalker"), - Map("name" → "Han Solo"), - Map("name" → "C-3PO"), - Map("name" → "R2-D2") + "name" -> "Leia Organa", + "appearsIn" -> List("NEWHOPE", "EMPIRE", "JEDI"), + "friends" -> List( + Map("name" -> "Luke Skywalker"), + Map("name" -> "Han Solo"), + Map("name" -> "C-3PO"), + Map("name" -> "R2-D2") ) ) ) @@ -165,9 +165,9 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "human" → Map( - "name" → "Luke Skywalker" + "data" -> Map( + "human" -> Map( + "name" -> "Luke Skywalker" ) ) )) @@ -182,13 +182,13 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport } """ - val args = mapVars("someId" → "1000") + val args = mapVars("someId" -> "1000") Executor.execute(StarWarsSchema, query, new CharacterRepo, variables = args, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "human" → Map( - "name" → "Luke Skywalker" + "data" -> Map( + "human" -> Map( + "name" -> "Luke Skywalker" ) ) )) @@ -203,13 +203,13 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport } """ - val args = mapVars("someId" → "1002") + val args = mapVars("someId" -> "1002") Executor.execute(StarWarsSchema, query, new CharacterRepo, variables = args, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "human" → Map( - "name" → "Han Solo" + "data" -> Map( + "human" -> Map( + "name" -> "Han Solo" ) ) )) @@ -224,12 +224,12 @@ class StarWarsQuerySpec extends WordSpec with Matchers with 
FutureResultSupport } """ - val args = mapVars("id" → "not a valid id") + val args = mapVars("id" -> "not a valid id") Executor.execute(StarWarsSchema, query, new CharacterRepo, variables = args, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "human" → null + "data" -> Map( + "human" -> null ) )) } @@ -248,9 +248,9 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "luke" → Map( - "name" → "Luke Skywalker") + "data" -> Map( + "luke" -> Map( + "name" -> "Luke Skywalker") ) )) } @@ -270,11 +270,11 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "luke" → Map( - "name" → "Luke Skywalker"), - "leia" → Map( - "name" → "Leia Organa") + "data" -> Map( + "luke" -> Map( + "name" -> "Luke Skywalker"), + "leia" -> Map( + "name" -> "Leia Organa") ) )) } @@ -297,13 +297,13 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "luke" → Map( - "name" → "Luke Skywalker", - "homePlanet" → "Tatooine"), - "leia" → Map( - "name" → "Leia Organa", - "homePlanet" → "Alderaan") + "data" -> Map( + "luke" -> Map( + "name" -> "Luke Skywalker", + "homePlanet" -> "Tatooine"), + "leia" -> Map( + "name" -> "Leia Organa", + "homePlanet" -> "Alderaan") ) )) } @@ -327,13 +327,13 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "luke" → Map( - "name" → "Luke Skywalker", - "homePlanet" → 
"Tatooine"), - "leia" → Map( - "name" → "Leia Organa", - "homePlanet" → "Alderaan") + "data" -> Map( + "luke" -> Map( + "name" -> "Luke Skywalker", + "homePlanet" -> "Tatooine"), + "leia" -> Map( + "name" -> "Leia Organa", + "homePlanet" -> "Alderaan") ) )) } @@ -352,10 +352,10 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "__typename" → "Droid", - "name" → "R2-D2") + "data" -> Map( + "hero" -> Map( + "__typename" -> "Droid", + "name" -> "R2-D2") ) )) } @@ -372,10 +372,10 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await should be ( Map( - "data" → Map( - "hero" → Map( - "__typename" → "Human", - "name" → "Luke Skywalker") + "data" -> Map( + "hero" -> Map( + "__typename" -> "Human", + "name" -> "Luke Skywalker") ) )) } @@ -395,16 +395,16 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport val res = Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await.asInstanceOf[Map[String, Any]] res("data") should be ( - Map("hero" → Map("name" → "R2-D2", "secretBackstory" → null))) + Map("hero" -> Map("name" -> "R2-D2", "secretBackstory" -> null))) val errors = res("errors").asInstanceOf[Seq[Any]] errors should ( have(size(1)) and contain(Map( - "message" → "secretBackstory is secret.", - "path" → List("hero", "secretBackstory"), - "locations" → Vector(Map("line" → 5, "column" → 13))))) + "message" -> "secretBackstory is secret.", + "path" -> List("hero", "secretBackstory"), + "locations" -> Vector(Map("line" -> 5, "column" -> 13))))) } "Correctly reports error on accessing secretBackstory in a list" in { @@ -423,30 +423,30 @@ class StarWarsQuerySpec extends WordSpec with 
Matchers with FutureResultSupport val res = Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await.asInstanceOf[Map[String, Any]] res("data") should be ( - Map("hero" → + Map("hero" -> Map( - "name" → "R2-D2", - "friends" → Vector( - Map("name" → "Luke Skywalker", "secretBackstory" → null), - Map("name" → "Han Solo", "secretBackstory" → null), - Map("name" → "Leia Organa", "secretBackstory" → null))))) + "name" -> "R2-D2", + "friends" -> Vector( + Map("name" -> "Luke Skywalker", "secretBackstory" -> null), + Map("name" -> "Han Solo", "secretBackstory" -> null), + Map("name" -> "Leia Organa", "secretBackstory" -> null))))) val errors = res("errors").asInstanceOf[Seq[Any]] errors should ( have(size(3)) and contain(Map( - "message" → "secretBackstory is secret.", - "path" → Vector("hero", "friends", 0, "secretBackstory"), - "locations" → Vector(Map("line" → 7, "column" → 15)))) and + "message" -> "secretBackstory is secret.", + "path" -> Vector("hero", "friends", 0, "secretBackstory"), + "locations" -> Vector(Map("line" -> 7, "column" -> 15)))) and contain(Map( - "message" → "secretBackstory is secret.", - "path" → Vector("hero", "friends", 1, "secretBackstory"), - "locations" → Vector(Map("line" → 7, "column" → 15)))) and + "message" -> "secretBackstory is secret.", + "path" -> Vector("hero", "friends", 1, "secretBackstory"), + "locations" -> Vector(Map("line" -> 7, "column" -> 15)))) and contain(Map( - "message" → "secretBackstory is secret.", - "path" → Vector("hero", "friends", 2, "secretBackstory"), - "locations" → Vector(Map("line" → 7, "column" → 15))))) + "message" -> "secretBackstory is secret.", + "path" -> Vector("hero", "friends", 2, "secretBackstory"), + "locations" -> Vector(Map("line" -> 7, "column" -> 15))))) } "Correctly reports error on accessing through an alias" in { @@ -462,26 +462,26 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport val res = 
Executor.execute(StarWarsSchema, query, new CharacterRepo, deferredResolver = new FriendsResolver).await.asInstanceOf[Map[String, Any]] res("data") should be ( - Map("mainHero" → Map("name" → "R2-D2", "story" → null))) + Map("mainHero" -> Map("name" -> "R2-D2", "story" -> null))) val errors = res("errors").asInstanceOf[Seq[Any]] errors should ( have(size(1)) and contain(Map( - "message" → "secretBackstory is secret.", - "path" → List("mainHero", "story"), - "locations" → Vector(Map("line" → 5, "column" → 13))))) + "message" -> "secretBackstory is secret.", + "path" -> List("mainHero", "story"), + "locations" -> Vector(Map("line" -> 5, "column" -> 13))))) } "Full response path is included when fields are non-nullable" in { - lazy val A: ObjectType[Unit, Any] = ObjectType("A", () ⇒ fields( - Field("nullableA", OptionType(A), resolve = _ ⇒ ""), - Field("nonNullA", A, resolve = _ ⇒ ""), - Field("throws", A, resolve = _ ⇒ throw PrivacyError("Catch me if you can")))) + lazy val A: ObjectType[Unit, Any] = ObjectType("A", () => fields( + Field("nullableA", OptionType(A), resolve = _ => ""), + Field("nonNullA", A, resolve = _ => ""), + Field("throws", A, resolve = _ => throw PrivacyError("Catch me if you can")))) val Query = ObjectType("Query", fields[Unit, Unit]( - Field("nullableA", OptionType(A), resolve = _ ⇒ ""))) + Field("nullableA", OptionType(A), resolve = _ => ""))) val schema = Schema(Query) @@ -502,16 +502,16 @@ class StarWarsQuerySpec extends WordSpec with Matchers with FutureResultSupport val res = Executor.execute(schema, query, queryValidator = QueryValidator.empty).await.asInstanceOf[Map[String, Any]] res("data") should be ( - Map("nullableA" → Map("nullableA" → null))) + Map("nullableA" -> Map("nullableA" -> null))) val errors = res("errors").asInstanceOf[Seq[Any]] errors should ( have(size(1)) and contain(Map( - "message" → "Catch me if you can", - "path" → List("nullableA", "nullableA", "nonNullA", "nonNullA", "throws"), - "locations" → List(Map("line" → 
7, "column" → 19))))) + "message" -> "Catch me if you can", + "path" -> List("nullableA", "nullableA", "nonNullA", "nonNullA", "throws"), + "locations" -> List(Map("line" -> 7, "column" -> 19))))) } } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/starWars/TestData.scala b/src/test/scala/sangria/starWars/TestData.scala index fdf28647..dbdc3add 100644 --- a/src/test/scala/sangria/starWars/TestData.scala +++ b/src/test/scala/sangria/starWars/TestData.scala @@ -70,19 +70,19 @@ object TestData { class FriendsResolver extends DeferredResolver[Any] { override def resolve(deferred: Vector[Deferred[Any]], ctx: Any, queryState: Any)(implicit ec: ExecutionContext) = deferred map { - case DeferFriends(friendIds) ⇒ - Future.fromTry(Try(friendIds map (id ⇒ characters.find(_.id == id)))) + case DeferFriends(friendIds) => + Future.fromTry(Try(friendIds map (id => characters.find(_.id == id)))) } } class CharacterRepo { def getHero(episode: Option[Episode.Value]) = - episode flatMap (_ ⇒ getHuman("1000")) getOrElse characters.last + episode flatMap (_ => getHuman("1000")) getOrElse characters.last - def getHuman(id: String): Option[Human] = characters.find(c ⇒ c.isInstanceOf[Human] && c.id == id).asInstanceOf[Option[Human]] + def getHuman(id: String): Option[Human] = characters.find(c => c.isInstanceOf[Human] && c.id == id).asInstanceOf[Option[Human]] - def getDroid(id: String): Option[Droid] = characters.find(c ⇒ c.isInstanceOf[Droid] && c.id == id).asInstanceOf[Option[Droid]] + def getDroid(id: String): Option[Droid] = characters.find(c => c.isInstanceOf[Droid] && c.id == id).asInstanceOf[Option[Droid]] - def getCharacters(ids: Seq[String]): Seq[Character] = ids.flatMap(id ⇒ characters.find(_.id == id)) + def getCharacters(ids: Seq[String]): Seq[Character] = ids.flatMap(id => characters.find(_.id == id)) } } diff --git a/src/test/scala/sangria/starWars/TestSchema.scala b/src/test/scala/sangria/starWars/TestSchema.scala index a8bc0102..82085fcd 100644 --- 
a/src/test/scala/sangria/starWars/TestSchema.scala +++ b/src/test/scala/sangria/starWars/TestSchema.scala @@ -27,7 +27,7 @@ object TestSchema { InterfaceType( "Character", "A character in the Star Wars Trilogy", - () ⇒ fields[Unit, TestData.Character]( + () => fields[Unit, TestData.Character]( Field("id", StringType, Some("The id of the character."), resolve = _.value.id), @@ -36,13 +36,13 @@ object TestSchema { resolve = _.value.name), Field("friends", OptionType(ListType(OptionType(Character))), Some("The friends of the character, or an empty list if they have none."), - resolve = ctx ⇒ DeferFriends(ctx.value.friends)), + resolve = ctx => DeferFriends(ctx.value.friends)), Field("appearsIn", OptionType(ListType(OptionType(EpisodeEnum))), Some("Which movies they appear in."), - resolve = _.value.appearsIn map (e ⇒ Some(e))), + resolve = _.value.appearsIn map (e => Some(e))), Field("secretBackstory", OptionType(StringType), Some("Where are they from and how they came to be who they are."), - resolve = _ ⇒ throw PrivacyError("secretBackstory is secret.")) + resolve = _ => throw PrivacyError("secretBackstory is secret.")) )) val Human = @@ -59,10 +59,10 @@ object TestSchema { resolve = _.value.name), Field("friends", OptionType(ListType(OptionType(Character))), Some("The friends of the human, or an empty list if they have none."), - resolve = (ctx) ⇒ DeferFriends(ctx.value.friends)), + resolve = (ctx) => DeferFriends(ctx.value.friends)), Field("appearsIn", OptionType(ListType(OptionType(EpisodeEnum))), Some("Which movies they appear in."), - resolve = _.value.appearsIn map (e ⇒ Some(e))), + resolve = _.value.appearsIn map (e => Some(e))), Field("homePlanet", OptionType(StringType), Some("The home planet of the human, or null if unknown."), resolve = _.value.homePlanet) @@ -79,13 +79,13 @@ object TestSchema { resolve = _.value.id), Field("name", OptionType(StringType), Some("The name of the droid."), - resolve = ctx ⇒ Future.successful(ctx.value.name)), + resolve = ctx 
=> Future.successful(ctx.value.name)), Field("friends", OptionType(ListType(OptionType(Character))), Some("The friends of the droid, or an empty list if they have none."), - resolve = ctx ⇒ DeferFriends(ctx.value.friends)), + resolve = ctx => DeferFriends(ctx.value.friends)), Field("appearsIn", OptionType(ListType(OptionType(EpisodeEnum))), Some("Which movies they appear in."), - resolve = _.value.appearsIn map (e ⇒ Some(e))), + resolve = _.value.appearsIn map (e => Some(e))), Field("primaryFunction", OptionType(StringType), Some("The primary function of the droid."), resolve = _.value.primaryFunction) @@ -100,13 +100,13 @@ object TestSchema { "Query", fields[CharacterRepo, Unit]( Field("hero", Character, arguments = EpisodeArg :: Nil, - resolve = (ctx) ⇒ ctx.ctx.getHero(ctx.arg(EpisodeArg))), + resolve = (ctx) => ctx.ctx.getHero(ctx.arg(EpisodeArg))), Field("human", OptionType(Human), arguments = ID :: Nil, - resolve = ctx ⇒ ctx.ctx.getHuman(ctx arg ID)), + resolve = ctx => ctx.ctx.getHuman(ctx arg ID)), Field("droid", Droid, arguments = ID :: Nil, - resolve = Projector((ctx, f)⇒ ctx.ctx.getDroid(ctx arg ID).get)) + resolve = Projector((ctx, f)=> ctx.ctx.getDroid(ctx arg ID).get)) )) val StarWarsSchema = Schema(Query) diff --git a/src/test/scala/sangria/streaming/StreamSpec.scala b/src/test/scala/sangria/streaming/StreamSpec.scala index 950c78fd..bec90ad7 100644 --- a/src/test/scala/sangria/streaming/StreamSpec.scala +++ b/src/test/scala/sangria/streaming/StreamSpec.scala @@ -29,14 +29,14 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import scala.concurrent.ExecutionContext.Implicits.global val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ ⇒ "world") + Field("hello", StringType, resolve = _ => "world") )) val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", StringType, resolve = _ ⇒ + Field.subs("letters", StringType, resolve = _ => 
Observable.from(List("a", "b").map(Action(_)))), - Field.subs("numbers", OptionType(IntType), resolve = _ ⇒ + Field.subs("numbers", OptionType(IntType), resolve = _ => Observable.from(List(1, 2).map(Action(_)))) )) @@ -70,14 +70,14 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import sangria.streaming.monix._ val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ ⇒ "world"))) + Field("hello", StringType, resolve = _ => "world"))) "Stream results with monix" in { val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", StringType, resolve = _ ⇒ + Field.subs("letters", StringType, resolve = _ => Observable("a", "b").map(Action(_))), - Field.subs("numbers", OptionType(IntType), resolve = _ ⇒ + Field.subs("numbers", OptionType(IntType), resolve = _ => Observable(1, 2).map(Action(_))) )) @@ -101,13 +101,13 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { "recover stream errors" in { val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ ⇒ - Observable("a", "b", "c", "d", "e").map { l ⇒ + Field.subs("letters", OptionType(StringType), resolve = _ => + Observable("a", "b", "c", "d", "e").map { l => if (l == "c") throw new IllegalStateException("foo") else l }.map(Action(_))), - Field.subs("numbers", OptionType(IntType), resolve = _ ⇒ + Field.subs("numbers", OptionType(IntType), resolve = _ => Observable(1, 2, 3, 4).map(Action(_))) )) @@ -116,7 +116,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import sangria.execution.ExecutionScheme.Stream val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val stream: Observable[JsValue] = @@ -141,7 +141,7 @@ class StreamSpec extends WordSpec with Matchers 
with FutureResultSupport { case class FruitSmashed(id: Int) trait Mutation { - this: Ctx ⇒ + this: Ctx => @GraphQLField def eatFruit(name: String, eater: String): String = { @@ -170,7 +170,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { } val cherryPicker = Fetcher.caching[Ctx, Fruit, Int]( - (ctx, ids) ⇒ Future.successful(ids.map(id ⇒ Fruit(id, "cherry", "red"))))(HasId(_.id)) + (ctx, ids) => Future.successful(ids.map(id => Fruit(id, "cherry", "red"))))(HasId(_.id)) val FruitType = ObjectType("Fruit", fields[Unit, Fruit]( Field("name", StringType, resolve = _.value.name), @@ -183,19 +183,19 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { )) val FruitSmashedType = ObjectType("FruitSmashed", fields[Unit, FruitSmashed]( - Field("fruit", FruitType, resolve = c ⇒ cherryPicker.defer(c.value.id)) + Field("fruit", FruitType, resolve = c => cherryPicker.defer(c.value.id)) )) val FruitEventType = UnionType("FruitEvent", types = FruitEatenType :: FruitSmashedType :: Nil) val QueryType = ObjectType("QueryType", fields[Ctx, Unit]( - Field("hello", StringType, resolve = _ ⇒ "world"))) + Field("hello", StringType, resolve = _ => "world"))) val MutationType = deriveContextObjectType[Ctx, Mutation, Unit](identity) val SubscriptionType = ObjectType("Subscription", fields[Ctx, Unit]( Field.subs("fruitEvents", OptionType(FruitEventType), resolve = - c ⇒ c.ctx.eventBus.map(Action(_))) + c => c.ctx.eventBus.map(Action(_))) )) val schema = Schema(QueryType, Some(MutationType), Some(SubscriptionType)) @@ -203,7 +203,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import sangria.execution.ExecutionScheme.Stream val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) ⇒ HandledException(e.getMessage) + case (m, e: IllegalStateException) => HandledException(e.getMessage) } val subscription = @@ -265,14 +265,14 @@ class StreamSpec extends WordSpec with Matchers with 
FutureResultSupport { import spray.json._ val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ ⇒ "world"))) + Field("hello", StringType, resolve = _ => "world"))) "return extended stream result" in { import _root_.monix.reactive.Observable import sangria.streaming.monix._ val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ ⇒ + Field.subs("letters", OptionType(StringType), resolve = _ => Observable("a", "b", "c").map(Action(_))))) val schema = Schema(QueryType, subscription = Some(SubscriptionType)) @@ -296,8 +296,8 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( Field.subs("letters", OptionType(StringType), - resolve = _ ⇒ Observable("a").map(Action(_))), - Field("hello", StringType, resolve = _ ⇒ "world"))) + resolve = _ => Observable("a").map(Action(_))), + Field("hello", StringType, resolve = _ => "world"))) val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) @@ -312,7 +312,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import rx.lang.scala.Observable Field.subs("letters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) ⇒ Observable.from(List("a")).map(Action(_))) + resolve = (_: Context[Unit, Unit]) => Observable.from(List("a")).map(Action(_))) } val f2 = { @@ -320,7 +320,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import _root_.monix.reactive.Observable Field.subs("otherLetters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) ⇒ Observable("a").map(Action(_))) + resolve = (_: Context[Unit, Unit]) => Observable("a").map(Action(_))) } val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit](f1, f2)) @@ -338,7 +338,7 @@ class StreamSpec extends WordSpec with Matchers with 
FutureResultSupport { import sangria.streaming.monix._ ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ ⇒ + Field.subs("letters", OptionType(StringType), resolve = _ => Observable("a", "b", "c").map(Action(_))))) } @@ -357,10 +357,10 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import sangria.streaming.monix._ val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ ⇒ + Field.subs("letters", OptionType(StringType), resolve = _ => Observable("a", "b").map(Action(_))), - Field.subs("numbers", OptionType(IntType), resolve = _ ⇒ + Field.subs("numbers", OptionType(IntType), resolve = _ => Observable(1, 2).map(Action(_))) )) @@ -380,8 +380,8 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { import sangria.streaming.monix._ val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field("letters", OptionType(StringType), resolve = _ ⇒ Some("a")), - Field("numbers", IntType, resolve = _ ⇒ 10))) + Field("letters", OptionType(StringType), resolve = _ => Some("a")), + Field("numbers", IntType, resolve = _ => 10))) val schema = Schema(QueryType, subscription = Some(SubscriptionType)) diff --git a/src/test/scala/sangria/util/CatsSupport.scala b/src/test/scala/sangria/util/CatsSupport.scala index bd7cd414..9c917e50 100644 --- a/src/test/scala/sangria/util/CatsSupport.scala +++ b/src/test/scala/sangria/util/CatsSupport.scala @@ -27,13 +27,13 @@ import JsonAndYamlHelpers._ * 3. Execute a test query against the generated schema ("when" part of the scenario) * 4. 
Assert results of the execution ("then" part of the scenario) */ -trait CatsSupport { this: WordSpec with Matchers ⇒ +trait CatsSupport { this: WordSpec with Matchers => import CatsScenarioData._ import CatsAssertions._ import CatsScenarioExecutor._ def generateTests(path: String) = { - FileUtil.loadScenarios(path) foreach { file ⇒ + FileUtil.loadScenarios(path) foreach { file => val scenario: YamlValue = file.scenario scenario("scenario").stringValue should { @@ -41,7 +41,7 @@ trait CatsSupport { this: WordSpec with Matchers ⇒ val bgBuilder = schemaBuilder(bgTestData getOrElse JsObject.empty) val bgSchema = getSchema(scenario.get("background"), file.folder) map (Schema.buildFromAst(_, bgBuilder)) - scenario("tests").arrayValue foreach { test ⇒ + scenario("tests").arrayValue foreach { test => val testName = test("name").stringValue testName in { @@ -50,8 +50,8 @@ trait CatsSupport { this: WordSpec with Matchers ⇒ val testSchema = getSchema(test.get("given"), file.folder) map (Schema.buildFromAst(_, testBuilder)) orElse { testTestData match { - case Some(newTestData) ⇒ getSchema(scenario.get("given"), file.folder) map (Schema.buildFromAst(_, testBuilder)) - case None ⇒ bgSchema + case Some(newTestData) => getSchema(scenario.get("given"), file.folder) map (Schema.buildFromAst(_, testBuilder)) + case None => bgSchema } } @@ -63,7 +63,7 @@ trait CatsSupport { this: WordSpec with Matchers ⇒ val result = executeAction(given, action) - assertions foreach { a ⇒ + assertions foreach { a => assertActionResult(result, a) } } @@ -102,77 +102,77 @@ object CatsScenarioExecutor extends FutureResultSupport { val resolvePromiseRejectList = Directive("resolvePromiseRejectList", arguments = ValuesArg :: MessagesArg :: Nil, locations = Set(DirectiveLocation.FieldDefinition)) def schemaBuilder(testData: JsValue): AstSchemaBuilder[Any] = AstSchemaBuilder.resolverBased[Any]( - DirectiveResolver(ResolveString, c ⇒ + DirectiveResolver(ResolveString, c => correctValue(c.ctx.field.fieldType, 
replacePlaceholders(c arg ValueArg, c.ctx.args))), - DirectiveResolver(ArgumentsJson, c ⇒ { + DirectiveResolver(ArgumentsJson, c => { def handleValue(v: Any) = v match { - case v: String ⇒ JsString(v) - case v: Boolean ⇒ JsBoolean(v) - case v: Int ⇒ JsNumber(v) + case v: String => JsString(v) + case v: Boolean => JsBoolean(v) + case v: Int => JsNumber(v) } val argsJson = c.ctx.args.raw flatMap { - case (k, Some(v)) ⇒ Some(k → handleValue(v)) - case (k, None) ⇒ None - case (k, v) ⇒ Some(k → handleValue(v)) + case (k, Some(v)) => Some(k -> handleValue(v)) + case (k, None) => None + case (k, v) => Some(k -> handleValue(v)) } correctValue(c.ctx.field.fieldType, JsObject(argsJson).compactPrint) }), - DirectiveResolver(ResolvePromiseString, c ⇒ { + DirectiveResolver(ResolvePromiseString, c => { Future { Thread.sleep((math.random * 50).toLong) correctValue(c.ctx.field.fieldType, replacePlaceholders(c arg ValueArg, c.ctx.args)) } }), - DirectiveResolver(ResolveEmptyObject, c ⇒ correctValue(c.ctx.field.fieldType, JsObject.empty)), + DirectiveResolver(ResolveEmptyObject, c => correctValue(c.ctx.field.fieldType, JsObject.empty)), - DirectiveResolver(ResolveTestData, c ⇒ correctValue(c.ctx.field.fieldType, testData(c arg NameArg))), + DirectiveResolver(ResolveTestData, c => correctValue(c.ctx.field.fieldType, testData(c arg NameArg))), - DirectiveResolver(ResolvePromiseTestData, c ⇒ Future { + DirectiveResolver(ResolvePromiseTestData, c => Future { Thread.sleep((math.random * 50).toLong) correctValue(c.ctx.field.fieldType, testData(c arg NameArg)) }), - DirectiveResolver(ResolvePromise, c ⇒ Future { + DirectiveResolver(ResolvePromise, c => Future { Thread.sleep((math.random * 50).toLong) extractCorrectValue(c.ctx.field.fieldType, c.ctx.value.asInstanceOf[JsValue].get(c.fieldDefinition.name), testData) }), - DirectiveResolver(ResolveError, c ⇒ throw ResolveException(c arg MessageArg)), + DirectiveResolver(ResolveError, c => throw ResolveException(c arg MessageArg)), - 
DirectiveResolver(ResolvePromiseReject, c ⇒ Future.failed[Any](ResolveException(c arg MessageArg))), + DirectiveResolver(ResolvePromiseReject, c => Future.failed[Any](ResolveException(c arg MessageArg))), - DirectiveResolver(ResolveErrorList, c ⇒ + DirectiveResolver(ResolveErrorList, c => PartialValue( correctValue(c.ctx.field.fieldType, c arg ValuesArg), c.arg(MessagesArg).map(ResolveException(_)).toVector)), - DirectiveResolver(resolvePromiseRejectList, c ⇒ + DirectiveResolver(resolvePromiseRejectList, c => PartialFutureValue(Future.successful(PartialValue[Any, Any]( correctValue(c.ctx.field.fieldType, c arg ValuesArg), c.arg(MessagesArg).map(ResolveException(_)).toVector)))), AnyFieldResolver { - case _ ⇒ c ⇒ extractCorrectValue(c.field.fieldType, c.value.asInstanceOf[JsValue].get(c.field.name), testData) + case _ => c => extractCorrectValue(c.field.fieldType, c.value.asInstanceOf[JsValue].get(c.field.name), testData) }, - InstanceCheck(c ⇒ - (value, _) ⇒ value.asInstanceOf[JsValue].get("type").exists(_.stringValue == c.definition.name))) + InstanceCheck(c => + (value, _) => value.asInstanceOf[JsValue].get("type").exists(_.stringValue == c.definition.name))) def executeAction(given: Given[Any, Any], action: Action) = action match { - case Parse ⇒ + case Parse => import sangria.parser.DeliveryScheme.Either ParsingResult(QueryParser.parse(given.query).left.map(_.asInstanceOf[SangriaSyntaxError])) - case Validate(rules) ⇒ + case Validate(rules) => import sangria.parser.DeliveryScheme.Throw ValidationResult(new RuleBasedQueryValidator(rules.toList).validateQuery(given.schema, QueryParser.parse(given.query))) - case Execute(validate, value, vars, op) ⇒ + case Execute(validate, value, vars, op) => import sangria.parser.DeliveryScheme.Throw val validator = if (validate) QueryValidator.default else QueryValidator.empty @@ -183,44 +183,44 @@ object CatsScenarioExecutor extends FutureResultSupport { variables = vars, operationName = op, exceptionHandler = 
exceptionHandler).await)) - case a ⇒ + case a => throw new IllegalStateException(s"Not yet supported action: $a") } val exceptionHandler = ExceptionHandler { - case (_, e: ResolveException) ⇒ HandledException(e.getMessage) + case (_, e: ResolveException) => HandledException(e.getMessage) } def resolveRef(value: JsValue, testData: JsValue) = value match { - case JsObject(fields) if fields.keySet == Set("$ref") ⇒ + case JsObject(fields) if fields.keySet == Set("$ref") => val name = fields("$ref").stringValue testData(name) - case v ⇒ v + case v => v } def extractCorrectValue(tpe: OutputType[_], value: Option[JsValue], testData: JsValue): Any = tpe match { - case OptionType(ofType) ⇒ Option(extractCorrectValue(ofType, value, testData)) - case _ if value.isEmpty || value.get == JsNull ⇒ null - case ListType(ofType) ⇒ value.get.arrayValue map (v ⇒ extractCorrectValue(ofType, Option(v), testData)) - case t: ScalarType[_] if t eq BooleanType ⇒ resolveRef(value.get, testData).booleanValue - case t: ScalarType[_] if t eq StringType ⇒ resolveRef(value.get, testData).stringValue - case t: ScalarType[_] if t eq IntType ⇒ resolveRef(value.get, testData).intValue - case t: CompositeType[_] ⇒ resolveRef(value.get, testData).asJsObject - case t ⇒ throw new IllegalStateException(s"Builder for type '$t' is not supported yet.") + case OptionType(ofType) => Option(extractCorrectValue(ofType, value, testData)) + case _ if value.isEmpty || value.get == JsNull => null + case ListType(ofType) => value.get.arrayValue map (v => extractCorrectValue(ofType, Option(v), testData)) + case t: ScalarType[_] if t eq BooleanType => resolveRef(value.get, testData).booleanValue + case t: ScalarType[_] if t eq StringType => resolveRef(value.get, testData).stringValue + case t: ScalarType[_] if t eq IntType => resolveRef(value.get, testData).intValue + case t: CompositeType[_] => resolveRef(value.get, testData).asJsObject + case t => throw new IllegalStateException(s"Builder for type '$t' is not 
supported yet.") } def correctValue(tpe: OutputType[_], value: Any): Any = tpe match { - case OptionType(_) ⇒ Option(value) - case _ ⇒ value + case OptionType(_) => Option(value) + case _ => value } def replacePlaceholders(template: String, args: Args) = args.raw.keys.foldLeft(template) { - case (acc, key) ⇒ acc.replaceAll("\\$" + key, args.arg[Any](key) match { - case Some(v) ⇒ "" + v - case None ⇒ "" - case v ⇒ "" + v + case (acc, key) => acc.replaceAll("\\$" + key, args.arg[Any](key) match { + case Some(v) => "" + v + case None => "" + case v => "" + v }) } @@ -243,7 +243,7 @@ object CatsAssertions extends Matchers { withLoc.locations should have size locations.size } - withLoc.locations.zipWithIndex foreach { case (pos, idx) ⇒ + withLoc.locations.zipWithIndex foreach { case (pos, idx) => withClue(s"Violation position mismatch (line: ${locations(idx).line}, column: ${locations(idx).column}): ${violation.errorMessage}") { ErrorLocation(pos.line, pos.column) should be(locations(idx)) } @@ -255,7 +255,7 @@ object CatsAssertions extends Matchers { else { val withLoc = violation.asInstanceOf[AstNodeLocation] - withLoc.locations.size == locations.size && withLoc.locations.zipWithIndex.forall { case (pos, idx) ⇒ + withLoc.locations.size == locations.size && withLoc.locations.zipWithIndex.forall { case (pos, idx) => ErrorLocation(pos.line, pos.column) == locations(idx) } } @@ -268,7 +268,7 @@ object CatsAssertions extends Matchers { actualLocs should have size locations.size } - actualLocs.zipWithIndex foreach { case (pos, idx) ⇒ + actualLocs.zipWithIndex foreach { case (pos, idx) => withClue(s"Violation position mismatch (line: ${locations(idx).line}, column: ${locations(idx).column}): ${error("message").stringValue}") { ErrorLocation(pos("line").intValue, pos("column").intValue) should be(locations(idx)) } @@ -276,27 +276,27 @@ object CatsAssertions extends Matchers { } def assertActionResult(result: Result, assertion: Assertion) = (result, assertion) match { - case 
(ValidationResult(violations), Passes) ⇒ + case (ValidationResult(violations), Passes) => violations should have size 0 - case (ParsingResult(res), Passes) ⇒ + case (ParsingResult(res), Passes) => withClue("Parsing result was not successful - query contains some syntax errors.") { res.isRight should be (true) } - case (ParsingResult(res), SyntaxError) ⇒ + case (ParsingResult(res), SyntaxError) => withClue("Parsing result was successful and does not contain syntax errors.") { res.isLeft should be (true) } - case (ValidationResult(violations), ErrorsCount(count)) ⇒ + case (ValidationResult(violations), ErrorsCount(count)) => violations should have size count - case (ExecutionResult(value), ErrorsCount(count)) ⇒ + case (ExecutionResult(value), ErrorsCount(count)) => value.get.get("errors").map(_.arrayValue).getOrElse(Vector.empty) should have size count - case (ValidationResult(violations), ErrorsContain(message, locations)) ⇒ + case (ValidationResult(violations), ErrorsContain(message, locations)) => message match { - case Left(text) ⇒ + case Left(text) => val v = withClue(s"Can't find error message: $text") { val v = violations.find(_.errorMessage.contains(text)) - withClue(s"Actual violations:${violations map (v ⇒ " * " + v.errorMessage) mkString ("\n", "\n", "\n")}") { + withClue(s"Actual violations:${violations map (v => " * " + v.errorMessage) mkString ("\n", "\n", "\n")}") { v should not be 'empty } @@ -304,11 +304,11 @@ object CatsAssertions extends Matchers { } assertLocations(v.get, locations) - case Right(pattern) ⇒ + case Right(pattern) => val v = withClue(s"Can't find error pattern: $pattern") { - val v = violations.find(v ⇒ pattern.matcher(v.errorMessage).matches) + val v = violations.find(v => pattern.matcher(v.errorMessage).matches) - withClue(s"Actual violations:${violations map (v ⇒ " * " + v.errorMessage) mkString ("\n", "\n", "\n")}") { + withClue(s"Actual violations:${violations map (v => " * " + v.errorMessage) mkString ("\n", "\n", "\n")}") { v 
should not be 'empty } v @@ -317,34 +317,34 @@ object CatsAssertions extends Matchers { assertLocations(v.get, locations) } - case (ValidationResult(violations), ErrorCode(code, args, locations)) ⇒ - withClue(s"Can't find error code '$code'${if (args.nonEmpty) s" with args: ${args.map{case (k, v) ⇒ k + " = " + v}.mkString(", ")}" else ""}${if (locations.nonEmpty) s" ${locations.map{case l ⇒ l.line + ":" + l.column}.mkString("(",", ", ")")}" else ""}.") { - val v = violations.collect {case v: SpecViolation ⇒ v}.find(v ⇒ v.code == code && v.args == args && sameLocations(v, locations)) + case (ValidationResult(violations), ErrorCode(code, args, locations)) => + withClue(s"Can't find error code '$code'${if (args.nonEmpty) s" with args: ${args.map{case (k, v) => k + " = " + v}.mkString(", ")}" else ""}${if (locations.nonEmpty) s" ${locations.map{case l => l.line + ":" + l.column}.mkString("(",", ", ")")}" else ""}.") { + val v = violations.collect {case v: SpecViolation => v}.find(v => v.code == code && v.args == args && sameLocations(v, locations)) - withClue(s"Actual violations:\n${violations map (v ⇒ "* " + s"[${v.getClass.getSimpleName}] " + v.errorMessage) mkString("\n", "\n", "\n")}") { + withClue(s"Actual violations:\n${violations map (v => "* " + s"[${v.getClass.getSimpleName}] " + v.errorMessage) mkString("\n", "\n", "\n")}") { v should not be 'empty } } - case (ExecutionResult(res), ExceptionContain(message)) ⇒ + case (ExecutionResult(res), ExceptionContain(message)) => res match { - case Failure(error) ⇒ + case Failure(error) => message match { - case Left(text) ⇒ error.getMessage should include (text) - case Right(pattern) ⇒ + case Left(text) => error.getMessage should include (text) + case Right(pattern) => withClue(s"Message '${error.getMessage}' does not match the pattern: $pattern") { pattern.matcher(error.getMessage).matches should be ("true") } } - case Success(res) ⇒ + case Success(res) => fail("Execution was successful: " + res) } - case 
(ExecutionResult(value), ErrorsContain(message, locations)) ⇒ + case (ExecutionResult(value), ErrorsContain(message, locations)) => val errors = value.get.get("errors") map (_.arrayValue) getOrElse Vector.empty message match { - case Left(text) ⇒ + case Left(text) => val v = withClue(s"Can't find error message: $text") { val v = errors.find(_("message").stringValue.contains(text)) @@ -353,9 +353,9 @@ object CatsAssertions extends Matchers { } assertLocations(v.get, locations) - case Right(pattern) ⇒ + case Right(pattern) => val v = withClue(s"Can't find error pattern: $pattern") { - val v = errors.find(v ⇒ pattern.matcher(v("message").stringValue).matches) + val v = errors.find(v => pattern.matcher(v("message").stringValue).matches) v should not be ('empty) v @@ -364,12 +364,12 @@ object CatsAssertions extends Matchers { assertLocations(v.get, locations) } - case (ExecutionResult(actual), Data(expected)) ⇒ + case (ExecutionResult(actual), Data(expected)) => withClue("Result: " + actual) { actual.get("data") should be (expected) } - case a ⇒ throw new IllegalStateException(s"Not yet supported assertion: $a") + case a => throw new IllegalStateException(s"Not yet supported assertion: $a") } } @@ -414,31 +414,31 @@ object CatsScenarioData { def getSchema(value: YamlValue, path: String): Option[ast.Document] = value.get("schema") - .map { v ⇒ + .map { v => import sangria.parser.DeliveryScheme.Throw QueryParser.parse(v.stringValue) } - .orElse(value.get("schema-file").map(f ⇒ FileUtil.loadSchema(path + "/" + f.stringValue))) + .orElse(value.get("schema-file").map(f => FileUtil.loadSchema(path + "/" + f.stringValue))) def getTestData(value: Option[YamlValue], path: String) = value .flatMap(_.get("test-data") map convertToJson) .orElse( - value.flatMap(_.get("test-data-file")).map(f ⇒ FileUtil.loadTestData(path + "/" + f.stringValue) match { - case Right(json) ⇒ json - case Left(yaml) ⇒ convertToJson(yaml) + value.flatMap(_.get("test-data-file")).map(f => 
FileUtil.loadTestData(path + "/" + f.stringValue) match { + case Right(json) => json + case Left(yaml) => convertToJson(yaml) })) def getAction(value: YamlValue, testName: String, testData: JsValue): Action = { val when = value("when") when.get("validate") - .map(v ⇒ Validate(v.arrayValue.toList.map(name ⇒ QueryValidator.allRules.find(_.getClass.getSimpleName == name.stringValue) getOrElse (throw new IllegalStateException(s"Can't find the validation rule: $name"))))) + .map(v => Validate(v.arrayValue.toList.map(name => QueryValidator.allRules.find(_.getClass.getSimpleName == name.stringValue) getOrElse (throw new IllegalStateException(s"Can't find the validation rule: $name"))))) .orElse { - when.get("execute").map { e ⇒ + when.get("execute").map { e => val validate = e.get("validate-query").map(_.booleanValue) getOrElse true - val value = e.get("test-value").map(name ⇒ testData(name.stringValue)) getOrElse JsNull + val value = e.get("test-value").map(name => testData(name.stringValue)) getOrElse JsNull val variables = e.get("variables") map convertToJson getOrElse JsObject.empty val operationName = e.get("operation-name") map (_.stringValue) @@ -446,38 +446,38 @@ object CatsScenarioData { } } .orElse { - when.get("parse").map(_ ⇒ Parse) + when.get("parse").map(_ => Parse) } .getOrElse(throw new IllegalStateException(s"Can't find action: $testName")) } def getErrorLocation(value: YamlValue) = value match { - case YamlArray(elems) ⇒ ErrorLocation(elems(0).intValue, elems(1).intValue) - case obj ⇒ ErrorLocation(obj("line").intValue, obj("column").intValue) + case YamlArray(elems) => ErrorLocation(elems(0).intValue, elems(1).intValue) + case obj => ErrorLocation(obj("line").intValue, obj("column").intValue) } def getErrorArgs(value: YamlValue) = value.get("args") match { - case Some(YamlObject(elems)) ⇒ elems.map {case (key, value) ⇒ key.stringValue → value.stringValue} - case _ ⇒ Map.empty[String, String] + case Some(YamlObject(elems)) => elems.map {case (key, value) 
=> key.stringValue -> value.stringValue} + case _ => Map.empty[String, String] } def getErrorLocations(value: YamlValue) = value.get("loc") match { - case Some(YamlArray(values)) ⇒ values map getErrorLocation - case Some(value) ⇒ Vector(getErrorLocation(value)) - case None ⇒ Vector.empty + case Some(YamlArray(values)) => values map getErrorLocation + case Some(value) => Vector(getErrorLocation(value)) + case None => Vector.empty } def getAssertion(value: YamlValue, testName: String): Assertion = { - value.get("passes").map(_ ⇒ Passes) - .orElse(value.get("error-count").map(v ⇒ ErrorsCount(v.intValue))) - .orElse(value.get("error").map(v ⇒ ErrorsContain(Left(v.stringValue), getErrorLocations(value).toList))) - .orElse(value.get("error-regex").map(v ⇒ ErrorsContain(Right(v.stringValue.r.pattern), getErrorLocations(value).toList))) - .orElse(value.get("error-code").map(v ⇒ ErrorCode(v.stringValue, getErrorArgs(value), getErrorLocations(value).toList))) - .orElse(value.get("exception").map(v ⇒ ExceptionContain(Left(v.stringValue)))) - .orElse(value.get("exception-regex").map(v ⇒ ExceptionContain(Right(v.stringValue.r.pattern)))) - .orElse(value.get("data").map(v ⇒ Data(convertToJson(v)))) - .orElse(value.get("syntax-error").map(_ ⇒ SyntaxError)) + value.get("passes").map(_ => Passes) + .orElse(value.get("error-count").map(v => ErrorsCount(v.intValue))) + .orElse(value.get("error").map(v => ErrorsContain(Left(v.stringValue), getErrorLocations(value).toList))) + .orElse(value.get("error-regex").map(v => ErrorsContain(Right(v.stringValue.r.pattern), getErrorLocations(value).toList))) + .orElse(value.get("error-code").map(v => ErrorCode(v.stringValue, getErrorArgs(value), getErrorLocations(value).toList))) + .orElse(value.get("exception").map(v => ExceptionContain(Left(v.stringValue)))) + .orElse(value.get("exception-regex").map(v => ExceptionContain(Right(v.stringValue.r.pattern)))) + .orElse(value.get("data").map(v => Data(convertToJson(v)))) + 
.orElse(value.get("syntax-error").map(_ => SyntaxError)) .getOrElse(throw new IllegalStateException(s"Can't find the assertion: $testName")) } @@ -485,9 +485,9 @@ object CatsScenarioData { val thenWord = value("then") thenWord match { - case YamlArray(elems) ⇒ + case YamlArray(elems) => elems map (getAssertion(_, testName)) - case other ⇒ + case other => Vector(getAssertion(other, testName)) } } @@ -506,23 +506,23 @@ object CatsScenarioData { object JsonAndYamlHelpers { implicit class YamlOps(value: YamlValue) { def get(key: String) = value match { - case YamlObject(fields) ⇒ fields.get(YamlString(key)) - case _ ⇒ None + case YamlObject(fields) => fields.get(YamlString(key)) + case _ => None } def apply(key: String) = get(key).get def stringValue = value.asInstanceOf[YamlString].value def arrayValue = value.asInstanceOf[YamlArray].elements def booleanValue = value.asInstanceOf[YamlBoolean].boolean def intValue = value.asInstanceOf[YamlNumber].value match { - case i if i.isValidInt ⇒ i.intValue - case v ⇒ throw new IllegalArgumentException(s"Unsupported Int '$v' of class '${v.getClass}'.") + case i if i.isValidInt => i.intValue + case v => throw new IllegalArgumentException(s"Unsupported Int '$v' of class '${v.getClass}'.") } } implicit class JsonOps(value: JsValue) { def get(key: String) = value match { - case JsObject(fields) ⇒ fields.get(key) - case _ ⇒ None + case JsObject(fields) => fields.get(key) + case _ => None } def apply(key: String) = get(key).get def stringValue = value.asInstanceOf[JsString].value @@ -532,12 +532,12 @@ object JsonAndYamlHelpers { } def convertToJson(value: YamlValue): JsValue = value match { - case YamlArray(elems) ⇒ JsArray(elems map convertToJson) - case YamlObject(fields) ⇒ JsObject(fields.map {case (k, v) ⇒ k.stringValue → convertToJson(v)}) - case YamlBoolean(v) ⇒ JsBoolean(v) - case YamlString(v) ⇒ JsString(v) - case YamlNumber(v: BigDecimal) ⇒ JsNumber(v) - case YamlNull ⇒ JsNull - case v ⇒ throw new IllegalStateException(s"Yaml 
value is not supported in conversion: $v") + case YamlArray(elems) => JsArray(elems map convertToJson) + case YamlObject(fields) => JsObject(fields.map {case (k, v) => k.stringValue -> convertToJson(v)}) + case YamlBoolean(v) => JsBoolean(v) + case YamlString(v) => JsString(v) + case YamlNumber(v: BigDecimal) => JsNumber(v) + case YamlNull => JsNull + case v => throw new IllegalStateException(s"Yaml value is not supported in conversion: $v") } -} \ No newline at end of file +} diff --git a/src/test/scala/sangria/util/DebugUtil.scala b/src/test/scala/sangria/util/DebugUtil.scala index 6a36bcdf..7425c207 100644 --- a/src/test/scala/sangria/util/DebugUtil.scala +++ b/src/test/scala/sangria/util/DebugUtil.scala @@ -10,7 +10,7 @@ import spray.json.JsValue object DebugUtil { private val indentClasses: PartialFunction[Any, Boolean] = { - case v if v.getClass.getSimpleName.startsWith("Introspection") ⇒ true + case v if v.getClass.getSimpleName.startsWith("Introspection") => true case _: Document | _: InputDocument | _: Definition | @@ -20,7 +20,7 @@ object DebugUtil { _: ObjectValue | _: ObjectField | _: ListValue | - _: Argument ⇒ true + _: Argument => true } private val myPrettifier: Prettifier = @@ -30,42 +30,42 @@ object DebugUtil { def loop(obj: Any, level: Int, indentLists: Boolean = false, indentMap: Boolean = false): String = obj match { - case null ⇒ "null" - case json: JsValue ⇒ json.prettyPrint - case aString: String ⇒ "\"" + StringUtil.escapeString(aString) + "\"" - case aStringWrapper: scala.collection.immutable.StringOps ⇒ "\"" + aStringWrapper + "\"" - case aChar: Char ⇒ "\'" + aChar + "\'" - case ot: OperationType ⇒ "OperationType." 
+ ot - case aGenMap: GenMap[_, _] ⇒ + case null => "null" + case json: JsValue => json.prettyPrint + case aString: String => "\"" + StringUtil.escapeString(aString) + "\"" + case aStringWrapper: scala.collection.immutable.StringOps => "\"" + aStringWrapper + "\"" + case aChar: Char => "\'" + aChar + "\'" + case ot: OperationType => "OperationType." + ot + case aGenMap: GenMap[_, _] => (if (indentMap) indent(level + 1) else "") + "Map(\n" + - aGenMap.toIterator.map { case (key, value) ⇒ - indent(level + 1) + loop(key, level) + " → " + loop(value, level + 1, indentMap = false, indentLists = true) + aGenMap.toIterator.map { case (key, value) => + indent(level + 1) + loop(key, level) + " -> " + loop(value, level + 1, indentMap = false, indentLists = true) }.mkString(",\n") + ")" - case list: scala.collection.immutable.List[_] ⇒ + case list: scala.collection.immutable.List[_] => if (list.isEmpty) "Nil" else if (indentLists) - "List(\n" + list.map(x ⇒ indent(level + 1) + loop(x, level + 1)).mkString(",\n") + ")" + "List(\n" + list.map(x => indent(level + 1) + loop(x, level + 1)).mkString(",\n") + ")" else - "List(" + list.map(x ⇒ loop(x, level)).mkString(", ") + ")" - case list: scala.collection.immutable.Vector[_] ⇒ + "List(" + list.map(x => loop(x, level)).mkString(", ") + ")" + case list: scala.collection.immutable.Vector[_] => if (list.isEmpty) "Vector.empty" else if (indentLists) - "Vector(\n" + list.map(x ⇒ indent(level + 1) + loop(x, level + 1)).mkString(",\n") + ")" + "Vector(\n" + list.map(x => indent(level + 1) + loop(x, level + 1)).mkString(",\n") + ")" else - "Vector(" + list.map(x ⇒ loop(x, level)).mkString(", ") + ")" - case prod: Product ⇒ + "Vector(" + list.map(x => loop(x, level)).mkString(", ") + ")" + case prod: Product => val args = prod.productIterator.toList if (args.nonEmpty) if (indentClasses.isDefinedAt(prod) && indentClasses(prod)) - prod.productPrefix + "(\n" + args.map(x ⇒ indent(level + 1) + loop(x, level + 1, true)).mkString(",\n") + "\n" + 
indent(level) + ")" + prod.productPrefix + "(\n" + args.map(x => indent(level + 1) + loop(x, level + 1, true)).mkString(",\n") + "\n" + indent(level) + ")" else - prod.productPrefix + "(" + args.map(x ⇒ loop(x, level, false)).mkString(", ") + ")" + prod.productPrefix + "(" + args.map(x => loop(x, level, false)).mkString(", ") + ")" else prod.productPrefix - case anythingElse ⇒ + case anythingElse => anythingElse.toString } diff --git a/src/test/scala/sangria/util/FileUtil.scala b/src/test/scala/sangria/util/FileUtil.scala index 4e2f5afc..f47f1640 100644 --- a/src/test/scala/sangria/util/FileUtil.scala +++ b/src/test/scala/sangria/util/FileUtil.scala @@ -30,7 +30,7 @@ object FileUtil extends StringMatchers { .values .toVector - yamlResources.map { resource ⇒ + yamlResources.map { resource => val name = resource.getPath.substring(resource.getPath.lastIndexOf("/") + 1) val relativePath = resource.getPathRelativeToClasspathElement val stream = this.getClass.getResourceAsStream("/" + relativePath) @@ -53,8 +53,8 @@ object FileUtil extends StringMatchers { def loadResource(path: String) = Option(this.getClass.getResourceAsStream("/" + path)) match { - case Some(res) ⇒ stripCarriageReturns(Source.fromInputStream(res, "UTF-8").mkString) - case None ⇒ throw new IllegalArgumentException("Resource not found: /" + path) + case Some(res) => stripCarriageReturns(Source.fromInputStream(res, "UTF-8").mkString) + case None => throw new IllegalArgumentException("Resource not found: /" + path) } case class ScenarioFile(fileName: String, path: String, scenario: YamlValue) { diff --git a/src/test/scala/sangria/util/FutureResultSupport.scala b/src/test/scala/sangria/util/FutureResultSupport.scala index 2a10d9df..300be6bd 100644 --- a/src/test/scala/sangria/util/FutureResultSupport.scala +++ b/src/test/scala/sangria/util/FutureResultSupport.scala @@ -16,13 +16,13 @@ trait FutureResultSupport { def awaitAndRecoverQueryAnalysis(implicit m: ResultMarshallerForType[T]): T = 
Await.result(recoverQueryAnalysis, 10 seconds) def recoverQueryAnalysis(implicit m: ResultMarshallerForType[T]): Future[T] = f.recover { - case analysisError: QueryAnalysisError ⇒ analysisError.resolveError(m.marshaller).asInstanceOf[T] + case analysisError: QueryAnalysisError => analysisError.resolveError(m.marshaller).asInstanceOf[T] } def awaitAndRecoverQueryAnalysisScala(implicit ev: T =:= Any) = Await.result(recoverQueryAnalysisScala, 10 seconds) def recoverQueryAnalysisScala(implicit ev: T =:= Any) = f.recover { - case analysisError: ErrorWithResolver ⇒ analysisError.resolveError + case analysisError: ErrorWithResolver => analysisError.resolveError } } diff --git a/src/test/scala/sangria/util/GraphQlSupport.scala b/src/test/scala/sangria/util/GraphQlSupport.scala index 752e451a..c109b63d 100644 --- a/src/test/scala/sangria/util/GraphQlSupport.scala +++ b/src/test/scala/sangria/util/GraphQlSupport.scala @@ -19,7 +19,7 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { val Success(doc) = QueryParser.parse(query) val exceptionHandler = ExceptionHandler { - case (m, e) ⇒ HandledException(e.getMessage) + case (m, e) => HandledException(e.getMessage) } Executor.execute( @@ -52,7 +52,7 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { errors should have size expectedErrors.size - expectedErrors foreach (expected ⇒ errors should contain (expected)) + expectedErrors foreach (expected => errors should contain (expected)) } def checkContainsErrors[T]( @@ -76,13 +76,13 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { val errors = result.getOrElse("errors", Vector.empty).asInstanceOf[Seq[Map[String, Any]]] val violations = - errors.map { error ⇒ + errors.map { error => val message = error("message").asInstanceOf[String] val locs = error.get("locations") match { - case Some(locs: Seq[Map[String, Any]] @unchecked) ⇒ - locs.map(loc ⇒ AstLocation(0, loc("line").asInstanceOf[Int], 
loc("column").asInstanceOf[Int])).toList - case _ ⇒ Nil + case Some(locs: Seq[Map[String, Any]] @unchecked) => + locs.map(loc => AstLocation(0, loc("line").asInstanceOf[Int], loc("column").asInstanceOf[Int])).toList + case _ => Nil } StubViolation(message, None, locs) @@ -94,16 +94,16 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { def renderViolations(violations: Vector[Violation]) = { val renderedHelpers = violations.zipWithIndex.map { - case (v, idx) ⇒ + case (v, idx) => v match { - case n: AstNodeLocation ⇒ "\"" + n.simpleErrorMessage + "\" → Seq(" + n.locations.map(l ⇒ s"Pos(${l.line}, ${l.column})").mkString(", ") + ")" - case n ⇒ n.errorMessage + case n: AstNodeLocation => "\"" + n.simpleErrorMessage + "\" -> Seq(" + n.locations.map(l => s"Pos(${l.line}, ${l.column})").mkString(", ") + ")" + case n => n.errorMessage } }.mkString(",\n") val rendered = violations.zipWithIndex.map { - case (v, idx) ⇒ s"(${idx + 1}) " + v.errorMessage + case (v, idx) => s"(${idx + 1}) " + v.errorMessage }.mkString("\n\n") "Actual violations:\n\n" + renderedHelpers + "\n\n" + rendered + "\n\n" @@ -114,15 +114,15 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { errors should have size expectedErrors.size } - expectedErrors foreach { case(expected, pos) ⇒ - withClue(s"Expected error not found: $expected${pos map (p ⇒ s" (line ${p.line}, column ${p.col})") mkString "; "}. ${renderViolations(errors)}") { - errors exists { error ⇒ + expectedErrors foreach { case(expected, pos) => + withClue(s"Expected error not found: $expected${pos map (p => s" (line ${p.line}, column ${p.col})") mkString "; "}. 
${renderViolations(errors)}") { + errors exists { error => error.errorMessage.contains(expected) && { val errorPositions = error.asInstanceOf[AstNodeViolation].locations errorPositions should have size pos.size - errorPositions zip pos forall { case (actualPos, expectedPos) ⇒ + errorPositions zip pos forall { case (actualPos, expectedPos) => expectedPos.line == actualPos.line && expectedPos.col == actualPos.column } } @@ -131,7 +131,7 @@ object SimpleGraphQlSupport extends FutureResultSupport with Matchers { } } - def checkContainsViolations(execute: ⇒ Unit, expected: (String, Seq[Pos])*) = + def checkContainsViolations(execute: => Unit, expected: (String, Seq[Pos])*) = assertViolations(intercept [WithViolations] (execute).violations, expected: _*) @@ -156,4 +156,4 @@ trait GraphQlSupport extends FutureResultSupport with Matchers { SimpleGraphQlSupport.checkContainsErrors(schema, data, query, expectedData, expectedErrorStrings, args = args, validateQuery = validateQuery) } -case class Pos(line: Int, col: Int) \ No newline at end of file +case class Pos(line: Int, col: Int) diff --git a/src/test/scala/sangria/util/OutputMatchers.scala b/src/test/scala/sangria/util/OutputMatchers.scala index 36513238..1b63dde5 100644 --- a/src/test/scala/sangria/util/OutputMatchers.scala +++ b/src/test/scala/sangria/util/OutputMatchers.scala @@ -5,7 +5,7 @@ import java.io.{PrintStream, ByteArrayOutputStream} import org.scalatest.Matchers trait OutputMatchers extends Matchers { - def captureStdErr(fn: ⇒ Unit) = { + def captureStdErr(fn: => Unit) = { val output = new ByteArrayOutputStream() val printStream = new PrintStream(output) val oldErr = System.err @@ -22,7 +22,7 @@ trait OutputMatchers extends Matchers { output.toString("UTF-8") } - def captureConsoleOut(fn: ⇒ Unit) = { + def captureConsoleOut(fn: => Unit) = { val output = new ByteArrayOutputStream() Console.withOut(output) { diff --git a/src/test/scala/sangria/util/ValidationSupport.scala 
b/src/test/scala/sangria/util/ValidationSupport.scala index d115ef68..41cd90a4 100644 --- a/src/test/scala/sangria/util/ValidationSupport.scala +++ b/src/test/scala/sangria/util/ValidationSupport.scala @@ -12,17 +12,17 @@ trait ValidationSupport extends Matchers { type TestField = Field[Unit, Unit] val Being = InterfaceType("Being", List[TestField]( - Field("name", OptionType(StringType), resolve = _ ⇒ None) + Field("name", OptionType(StringType), resolve = _ => None) )) val Pet = InterfaceType("Pet", List[TestField]( - Field("name", OptionType(StringType), resolve = _ ⇒ None) + Field("name", OptionType(StringType), resolve = _ => None) )) val Canine = InterfaceType("Canine", List[TestField]( Field("name", OptionType(StringType), arguments = Argument("surname", OptionInputType(BooleanType)) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val DogCommand = EnumType("DogCommand", values = List( @@ -41,42 +41,42 @@ trait ValidationSupport extends Matchers { val Dog = ObjectType("Dog", interfaces[Unit, Unit](Being, Pet, Canine), List[TestField]( Field("name", OptionType(StringType), arguments = Argument("surname", OptionInputType(BooleanType)) :: Nil, - resolve = _ ⇒ None), - Field("nickname", OptionType(StringType), resolve = _ ⇒ None), - Field("barks", OptionType(BooleanType), resolve = _ ⇒ None), - Field("barkVolume", OptionType(IntType), resolve = _ ⇒ None), + resolve = _ => None), + Field("nickname", OptionType(StringType), resolve = _ => None), + Field("barks", OptionType(BooleanType), resolve = _ => None), + Field("barkVolume", OptionType(IntType), resolve = _ => None), Field("doesKnowCommand", OptionType(BooleanType), arguments = Argument("dogCommand", OptionInputType(DogCommand)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("isHousetrained", OptionType(BooleanType), arguments = Argument("atOtherHomes", OptionInputType(BooleanType), true) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("isAtLocation", OptionType(BooleanType), 
arguments = Argument("x", OptionInputType(IntType)) :: Argument("y", OptionInputType(IntType)) :: Nil, - resolve = _ ⇒ None))) + resolve = _ => None))) val Cat = ObjectType("Cat", interfaces[Unit, Unit](Being, Pet), List[TestField]( - Field("name", OptionType(StringType), resolve = _ ⇒ None), - Field("nickname", OptionType(StringType), resolve = _ ⇒ None), - Field("meows", OptionType(BooleanType), resolve = _ ⇒ None), - Field("meowVolume", OptionType(IntType), resolve = _ ⇒ None), - Field("furColor", OptionType(FurColor), resolve = _ ⇒ None))) + Field("name", OptionType(StringType), resolve = _ => None), + Field("nickname", OptionType(StringType), resolve = _ => None), + Field("meows", OptionType(BooleanType), resolve = _ => None), + Field("meowVolume", OptionType(IntType), resolve = _ => None), + Field("furColor", OptionType(FurColor), resolve = _ => None))) val CatOrDog = UnionType("CatOrDog", types = Dog :: Cat :: Nil) val Intelligent = InterfaceType("Intelligent", List[TestField]( - Field("iq", OptionType(IntType), resolve = _ ⇒ None) + Field("iq", OptionType(IntType), resolve = _ => None) )) - val Human: ObjectType[Unit, Unit] = ObjectType("Human", interfaces[Unit, Unit](Being, Intelligent), () ⇒ List[TestField]( + val Human: ObjectType[Unit, Unit] = ObjectType("Human", interfaces[Unit, Unit](Being, Intelligent), () => List[TestField]( Field("name", OptionType(StringType), arguments = Argument("surname", OptionInputType(BooleanType)) :: Nil, - resolve = _ ⇒ None), - Field("pets", OptionType(ListType(OptionType(Pet))), resolve = _ ⇒ None), - Field("relatives", OptionType(ListType(OptionType(Human))), resolve = _ ⇒ None))) + resolve = _ => None), + Field("pets", OptionType(ListType(OptionType(Pet))), resolve = _ => None), + Field("relatives", OptionType(ListType(OptionType(Human))), resolve = _ => None))) val Alien = ObjectType("Alien", interfaces[Unit, Unit](Being, Intelligent), List[TestField]( - Field("numEyes", OptionType(IntType), resolve = _ ⇒ None))) + 
Field("numEyes", OptionType(IntType), resolve = _ => None))) val DogOrHuman = UnionType("DogOrHuman", types = Dog :: Human :: Nil) @@ -94,46 +94,46 @@ trait ValidationSupport extends Matchers { val ComplicatedArgs = ObjectType("ComplicatedArgs", List[TestField]( Field("intArgField", OptionType(StringType), arguments = Argument("intArg", OptionInputType(IntType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("bigIntArgField", OptionType(StringType), arguments = Argument("bigIntArg", OptionInputType(BigIntType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("nonNullIntArgField", OptionType(StringType), arguments = Argument("nonNullIntArg", IntType) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("stringArgField", OptionType(StringType), arguments = Argument("stringArg", OptionInputType(StringType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("booleanArgField", OptionType(StringType), arguments = Argument("booleanArg", OptionInputType(BooleanType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("enumArgField", OptionType(StringType), arguments = Argument("enumArg", OptionInputType(FurColor)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("floatArgField", OptionType(StringType), arguments = Argument("floatArg", OptionInputType(FloatType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("bigDecimalArgField", OptionType(StringType), arguments = Argument("bigDecimalArg", OptionInputType(BigDecimalType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("idArgField", OptionType(StringType), arguments = Argument("idArg", OptionInputType(IDType)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("stringListArgField", OptionType(StringType), arguments = Argument("stringListArg", OptionInputType(ListInputType(OptionInputType(StringType)))) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("complexArgField", OptionType(StringType), arguments = 
Argument("complexArg", OptionInputType(ComplexInput)) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("multipleReqs", OptionType(StringType), arguments = Argument("req1", IntType) :: Argument("req2", IntType) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("nonNullFieldWithDefault", OptionType(StringType), arguments = Argument("arg", IntType, 0) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("multipleOpts", OptionType(StringType), arguments = Argument("opt1", OptionInputType(IntType), 0) :: Argument("opt2", OptionInputType(IntType), 0) :: Nil, - resolve = _ ⇒ None), + resolve = _ => None), Field("multipleOptAndReq", OptionType(StringType), arguments = Argument("req1", IntType) :: @@ -141,21 +141,21 @@ trait ValidationSupport extends Matchers { Argument("opt1", OptionInputType(IntType), 0) :: Argument("opt2", OptionInputType(IntType), 0) :: Nil, - resolve = _ ⇒ None) + resolve = _ => None) )) val QueryRoot = ObjectType("QueryRoot", List[TestField]( Field("human", OptionType(Human), arguments = Argument("id", OptionInputType(IDType)) :: Nil, - resolve = _ ⇒ None), - Field("alien", OptionType(Alien), resolve = _ ⇒ None), - Field("dog", OptionType(Dog), resolve = _ ⇒ None), - Field("cat", OptionType(Cat), resolve = _ ⇒ None), - Field("pet", OptionType(Pet), resolve = _ ⇒ None), - Field("catOrDog", OptionType(CatOrDog), resolve = _ ⇒ None), - Field("dogOrHuman", OptionType(DogOrHuman), resolve = _ ⇒ None), - Field("humanOrAlien", OptionType(HumanOrAlien), resolve = _ ⇒ None), - Field("complicatedArgs", OptionType(ComplicatedArgs), resolve = _ ⇒ None) + resolve = _ => None), + Field("alien", OptionType(Alien), resolve = _ => None), + Field("dog", OptionType(Dog), resolve = _ => None), + Field("cat", OptionType(Cat), resolve = _ => None), + Field("pet", OptionType(Pet), resolve = _ => None), + Field("catOrDog", OptionType(CatOrDog), resolve = _ => None), + Field("dogOrHuman", OptionType(DogOrHuman), resolve = _ => None), + 
Field("humanOrAlien", OptionType(HumanOrAlien), resolve = _ => None), + Field("complicatedArgs", OptionType(ComplicatedArgs), resolve = _ => None) )) private def alwaysInclude(ctx: DirectiveContext): Boolean = true @@ -222,10 +222,10 @@ trait ValidationSupport extends Matchers { expectValidInput(schema, defaultRule.get :: Nil, query, typeName) def expectFailsRule(rule: ValidationRule, query: String, expectedErrors: List[(String, Option[Pos])]) = - expectInvalid(schema, rule :: Nil, query, expectedErrors.map{case (msg, pos) ⇒ msg → pos.toList}) + expectInvalid(schema, rule :: Nil, query, expectedErrors.map{case (msg, pos) => msg -> pos.toList}) def expectFails(query: String, expectedErrors: List[(String, Option[Pos])]) = - expectInvalid(schema, defaultRule.get :: Nil, query, expectedErrors.map{case (msg, pos) ⇒ msg → pos.toList}) + expectInvalid(schema, defaultRule.get :: Nil, query, expectedErrors.map{case (msg, pos) => msg -> pos.toList}) def expectInputFails(typeName: String, query: String, expectedErrors: List[(String, List[Pos])]) = expectInputInvalid(schema, defaultRule.get :: Nil, query, expectedErrors, typeName) diff --git a/src/test/scala/sangria/validation/DocumentAnalyzerSpec.scala b/src/test/scala/sangria/validation/DocumentAnalyzerSpec.scala index b2f22b0f..923462e5 100644 --- a/src/test/scala/sangria/validation/DocumentAnalyzerSpec.scala +++ b/src/test/scala/sangria/validation/DocumentAnalyzerSpec.scala @@ -13,11 +13,11 @@ class DocumentAnalyzerSpec extends WordSpec with Matchers with StringMatchers { val QueryType = ObjectType("Query", fields[Unit, Unit]( Field("normalField", OptionType(NumberType), arguments = Argument("enumArg", OptionInputType(NumberType)) :: Nil, - resolve = ctx ⇒ ctx.argOpt[Int]("enumArg")), + resolve = ctx => ctx.argOpt[Int]("enumArg")), Field("deprecatedField", OptionType(StringType), deprecationReason = Some("Some field reason."), - resolve = _ ⇒ "foo"))) + resolve = _ => "foo"))) val schema = Schema(QueryType) diff --git 
a/src/test/scala/sangria/validation/rules/ExecutableDefinitionsSpec.scala b/src/test/scala/sangria/validation/rules/ExecutableDefinitionsSpec.scala index 41ef7f54..62616bb1 100644 --- a/src/test/scala/sangria/validation/rules/ExecutableDefinitionsSpec.scala +++ b/src/test/scala/sangria/validation/rules/ExecutableDefinitionsSpec.scala @@ -48,8 +48,8 @@ class ExecutableDefinitionsSpec extends WordSpec with ValidationSupport { } """, List( - "The 'Cow' definition is not executable." → Some(Pos(8, 9)), - "The 'Dog' definition is not executable" → Some(Pos(12, 9)) + "The 'Cow' definition is not executable." -> Some(Pos(8, 9)), + "The 'Dog' definition is not executable" -> Some(Pos(12, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/FieldsOnCorrectTypeSpec.scala b/src/test/scala/sangria/validation/rules/FieldsOnCorrectTypeSpec.scala index 981f0c77..fb69b8b0 100644 --- a/src/test/scala/sangria/validation/rules/FieldsOnCorrectTypeSpec.scala +++ b/src/test/scala/sangria/validation/rules/FieldsOnCorrectTypeSpec.scala @@ -61,7 +61,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'meowVolume' on type 'Dog'. Did you mean 'barkVolume'?" → Some(Pos(3, 11)) + "Cannot query field 'meowVolume' on type 'Dog'. Did you mean 'barkVolume'?" -> Some(Pos(3, 11)) )) "Field not defined deeply, only reports first" in expectFails( @@ -73,7 +73,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'unknown_field' on type 'Dog'." → Some(Pos(3, 11)) + "Cannot query field 'unknown_field' on type 'Dog'." -> Some(Pos(3, 11)) )) "Sub-field not defined" in expectFails( @@ -85,7 +85,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'unknown_field' on type 'Pet'." → Some(Pos(4, 13)) + "Cannot query field 'unknown_field' on type 'Pet'." 
-> Some(Pos(4, 13)) )) "Field not defined on inline fragment" in expectFails( @@ -97,7 +97,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'meowVolume' on type 'Dog'. Did you mean 'barkVolume'?" → Some(Pos(4, 13)) + "Cannot query field 'meowVolume' on type 'Dog'. Did you mean 'barkVolume'?" -> Some(Pos(4, 13)) )) "Aliased field target not defined" in expectFails( @@ -107,7 +107,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'mooVolume' on type 'Dog'. Did you mean 'barkVolume'?" → Some(Pos(3, 11)) + "Cannot query field 'mooVolume' on type 'Dog'. Did you mean 'barkVolume'?" -> Some(Pos(3, 11)) )) "Aliased lying field target not defined" in expectFails( @@ -117,7 +117,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'kawVolume' on type 'Dog'. Did you mean 'barkVolume'?" → Some(Pos(3, 11)) + "Cannot query field 'kawVolume' on type 'Dog'. Did you mean 'barkVolume'?" -> Some(Pos(3, 11)) )) "Not defined on interface" in expectFails( @@ -127,7 +127,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'tailLength' on type 'Pet'." → Some(Pos(3, 11)) + "Cannot query field 'tailLength' on type 'Pet'." -> Some(Pos(3, 11)) )) "Defined on implementors but not on interface" in expectFails( @@ -137,7 +137,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'nickname' on type 'Pet'. Did you mean to use an inline fragment on 'Cat' or 'Dog'?" → Some(Pos(3, 11)) + "Cannot query field 'nickname' on type 'Pet'. Did you mean to use an inline fragment on 'Cat' or 'Dog'?" 
-> Some(Pos(3, 11)) )) "Meta field selection on union" in expectPasses( @@ -154,7 +154,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'directField' on type 'CatOrDog'." → Some(Pos(3, 11)) + "Cannot query field 'directField' on type 'CatOrDog'." -> Some(Pos(3, 11)) )) "Defined on implementors queried on union" in expectFails( @@ -164,7 +164,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'name' on type 'CatOrDog'. Did you mean to use an inline fragment on 'Being', 'Pet', 'Canine', 'Cat' or 'Dog'?" → Some(Pos(3, 11)) + "Cannot query field 'name' on type 'CatOrDog'. Did you mean to use an inline fragment on 'Being', 'Pet', 'Canine', 'Cat' or 'Dog'?" -> Some(Pos(3, 11)) )) "valid field in inline fragment" in expectPasses( @@ -188,7 +188,7 @@ class FieldsOnCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot query field 'numberOfTails' on type 'Dog'" → Some(Pos(6, 15)) + "Cannot query field 'numberOfTails' on type 'Dog'" -> Some(Pos(6, 15)) )) } diff --git a/src/test/scala/sangria/validation/rules/FragmentsOnCompositeTypesSpec.scala b/src/test/scala/sangria/validation/rules/FragmentsOnCompositeTypesSpec.scala index 06ce4fc7..d12bbfb6 100644 --- a/src/test/scala/sangria/validation/rules/FragmentsOnCompositeTypesSpec.scala +++ b/src/test/scala/sangria/validation/rules/FragmentsOnCompositeTypesSpec.scala @@ -45,7 +45,7 @@ class FragmentsOnCompositeTypesSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'scalarFragment' cannot condition on non composite type 'Boolean'." → Some(Pos(2, 36)) + "Fragment 'scalarFragment' cannot condition on non composite type 'Boolean'." 
-> Some(Pos(2, 36)) )) "enum is invalid fragment type" in expectFails( @@ -55,7 +55,7 @@ class FragmentsOnCompositeTypesSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'scalarFragment' cannot condition on non composite type 'FurColor'." → Some(Pos(2, 36)) + "Fragment 'scalarFragment' cannot condition on non composite type 'FurColor'." -> Some(Pos(2, 36)) )) "input object is invalid fragment type" in expectFails( @@ -65,7 +65,7 @@ class FragmentsOnCompositeTypesSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'inputFragment' cannot condition on non composite type 'ComplexInput'." → Some(Pos(2, 35)) + "Fragment 'inputFragment' cannot condition on non composite type 'ComplexInput'." -> Some(Pos(2, 35)) )) "scalar is invalid inline fragment type" in expectFails( @@ -77,7 +77,7 @@ class FragmentsOnCompositeTypesSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment cannot condition on non composite type 'String'." → Some(Pos(3, 18)) + "Fragment cannot condition on non composite type 'String'." -> Some(Pos(3, 18)) )) "inline fragment without type is valid" in expectPasses( diff --git a/src/test/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInferenceSpec.scala b/src/test/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInferenceSpec.scala index 8627b37b..b695b745 100644 --- a/src/test/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInferenceSpec.scala +++ b/src/test/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInferenceSpec.scala @@ -26,7 +26,7 @@ class InputDocumentNonConflictingVariableInferenceSpec extends WordSpec with Val } """, List( - "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." → List(Pos(5, 29), Pos(3, 26)) + "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." 
-> List(Pos(5, 29), Pos(3, 26)) )) "variable used multiple times with incompatible types" in expectInputFails("ComplexInput", @@ -39,9 +39,9 @@ class InputDocumentNonConflictingVariableInferenceSpec extends WordSpec with Val } """, List( - "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'Int'." → List(Pos(4, 21), Pos(3, 26)), - "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." → List(Pos(5, 24), Pos(3, 26)), - "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." → List(Pos(6, 29), Pos(3, 26)) + "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'Int'." -> List(Pos(4, 21), Pos(3, 26)), + "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." -> List(Pos(5, 24), Pos(3, 26)), + "Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." -> List(Pos(6, 29), Pos(3, 26)) )) } } diff --git a/src/test/scala/sangria/validation/rules/KnownArgumentNamesSpec.scala b/src/test/scala/sangria/validation/rules/KnownArgumentNamesSpec.scala index 60c7d6fe..472023a7 100644 --- a/src/test/scala/sangria/validation/rules/KnownArgumentNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/KnownArgumentNamesSpec.scala @@ -73,7 +73,7 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'unless' on directive 'skip'." → Some(Pos(3, 21)) + "Unknown argument 'unless' on directive 'skip'." -> Some(Pos(3, 21)) )) "misspelled directive args are reported" in expectFails( @@ -83,7 +83,7 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'iff' on directive 'skip'. Did you mean 'if'?" → Some(Pos(3, 21)) + "Unknown argument 'iff' on directive 'skip'. Did you mean 'if'?" 
-> Some(Pos(3, 21)) )) "invalid arg name" in expectFails( @@ -93,7 +93,7 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." → Some(Pos(3, 27)) + "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." -> Some(Pos(3, 27)) )) "misspelled arg name is reported" in expectFails( @@ -103,7 +103,7 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'dogcommand' on field 'doesKnowCommand' of type 'Dog'. Did you mean 'dogCommand'?" → Some(Pos(3, 27)) + "Unknown argument 'dogcommand' on field 'doesKnowCommand' of type 'Dog'. Did you mean 'dogCommand'?" -> Some(Pos(3, 27)) )) "unknown args amongst known args" in expectFails( @@ -113,8 +113,8 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'whoknows' on field 'doesKnowCommand' of type 'Dog'." → Some(Pos(3, 27)), - "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." → Some(Pos(3, 57)) + "Unknown argument 'whoknows' on field 'doesKnowCommand' of type 'Dog'." -> Some(Pos(3, 27)), + "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." -> Some(Pos(3, 57)) )) "unknown args deeply" in expectFails( @@ -133,8 +133,8 @@ class KnownArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." → Some(Pos(4, 29)), - "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." → Some(Pos(9, 33)) + "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." -> Some(Pos(4, 29)), + "Unknown argument 'unknown' on field 'doesKnowCommand' of type 'Dog'." 
-> Some(Pos(9, 33)) )) } } diff --git a/src/test/scala/sangria/validation/rules/KnownDirectivesSpec.scala b/src/test/scala/sangria/validation/rules/KnownDirectivesSpec.scala index b945faf8..08411f31 100644 --- a/src/test/scala/sangria/validation/rules/KnownDirectivesSpec.scala +++ b/src/test/scala/sangria/validation/rules/KnownDirectivesSpec.scala @@ -42,7 +42,7 @@ class KnownDirectivesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown directive 'unknown'." → Some(Pos(3, 17)) + "Unknown directive 'unknown'." -> Some(Pos(3, 17)) )) "with many unknown directives" in expectFails( @@ -60,9 +60,9 @@ class KnownDirectivesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown directive 'unknown'." → Some(Pos(3, 17)), - "Unknown directive 'unknown'." → Some(Pos(6, 19)), - "Unknown directive 'unknown'." → Some(Pos(8, 20)) + "Unknown directive 'unknown'." -> Some(Pos(3, 17)), + "Unknown directive 'unknown'." -> Some(Pos(6, 19)), + "Unknown directive 'unknown'." -> Some(Pos(8, 20)) )) "with well placed directives" in expectPasses( @@ -90,11 +90,11 @@ class KnownDirectivesSpec extends WordSpec with ValidationSupport { someField } """, - "Directive 'onField' may not be used on variable definition." → Seq(Pos(2, 35)), - "Directive 'include' may not be used on query operation." → Seq(Pos(2, 45)), - "Directive 'onQuery' may not be used on field." → Seq(Pos(3, 18)), - "Directive 'onQuery' may not be used on fragment spread." → Seq(Pos(4, 21)), - "Directive 'onQuery' may not be used on mutation operation." → Seq(Pos(7, 24))) + "Directive 'onField' may not be used on variable definition." -> Seq(Pos(2, 35)), + "Directive 'include' may not be used on query operation." -> Seq(Pos(2, 45)), + "Directive 'onQuery' may not be used on field." -> Seq(Pos(3, 18)), + "Directive 'onQuery' may not be used on fragment spread." -> Seq(Pos(4, 21)), + "Directive 'onQuery' may not be used on mutation operation." 
-> Seq(Pos(7, 24))) } "within schema language" should { @@ -152,19 +152,19 @@ class KnownDirectivesSpec extends WordSpec with ValidationSupport { } """, List( - "Directive 'onInputFieldDefinition' may not be used on argument definition." → Some(Pos(3, 32)), - "Directive 'onInputFieldDefinition' may not be used on field definition." → Some(Pos(3, 65)), - "Directive 'onInterface' may not be used on object type definition." → Some(Pos(2, 45)), - "Directive 'onEnum' may not be used on scalar type definition." → Some(Pos(6, 27)), - "Directive 'onInputFieldDefinition' may not be used on argument definition." → Some(Pos(9, 32)), - "Directive 'onInputFieldDefinition' may not be used on field definition." → Some(Pos(9, 65)), - "Directive 'onObject' may not be used on interface definition." → Some(Pos(8, 33)), - "Directive 'onEnumValue' may not be used on union definition." → Some(Pos(12, 25)), - "Directive 'onUnion' may not be used on enum value definition." → Some(Pos(15, 22)), - "Directive 'onScalar' may not be used on enum definition." → Some(Pos(14, 23)), - "Directive 'onArgumentDefinition' may not be used on input field definition." → Some(Pos(19, 26)), - "Directive 'onEnum' may not be used on input object type definition." → Some(Pos(18, 25)), - "Directive 'onObject' may not be used on schema definition." → Some(Pos(22, 18)) + "Directive 'onInputFieldDefinition' may not be used on argument definition." -> Some(Pos(3, 32)), + "Directive 'onInputFieldDefinition' may not be used on field definition." -> Some(Pos(3, 65)), + "Directive 'onInterface' may not be used on object type definition." -> Some(Pos(2, 45)), + "Directive 'onEnum' may not be used on scalar type definition." -> Some(Pos(6, 27)), + "Directive 'onInputFieldDefinition' may not be used on argument definition." -> Some(Pos(9, 32)), + "Directive 'onInputFieldDefinition' may not be used on field definition." -> Some(Pos(9, 65)), + "Directive 'onObject' may not be used on interface definition." 
-> Some(Pos(8, 33)), + "Directive 'onEnumValue' may not be used on union definition." -> Some(Pos(12, 25)), + "Directive 'onUnion' may not be used on enum value definition." -> Some(Pos(15, 22)), + "Directive 'onScalar' may not be used on enum definition." -> Some(Pos(14, 23)), + "Directive 'onArgumentDefinition' may not be used on input field definition." -> Some(Pos(19, 26)), + "Directive 'onEnum' may not be used on input object type definition." -> Some(Pos(18, 25)), + "Directive 'onObject' may not be used on schema definition." -> Some(Pos(22, 18)) )) } } diff --git a/src/test/scala/sangria/validation/rules/KnownFragmentNamesSpec.scala b/src/test/scala/sangria/validation/rules/KnownFragmentNamesSpec.scala index df898582..880293b7 100644 --- a/src/test/scala/sangria/validation/rules/KnownFragmentNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/KnownFragmentNamesSpec.scala @@ -49,9 +49,9 @@ class KnownFragmentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown fragment 'UnknownFragment1'." → Some(Pos(4, 13)), - "Unknown fragment 'UnknownFragment2'." → Some(Pos(6, 15)), - "Unknown fragment 'UnknownFragment3'." → Some(Pos(12, 11)) + "Unknown fragment 'UnknownFragment1'." -> Some(Pos(4, 13)), + "Unknown fragment 'UnknownFragment2'." -> Some(Pos(6, 15)), + "Unknown fragment 'UnknownFragment3'." -> Some(Pos(12, 11)) )) } } diff --git a/src/test/scala/sangria/validation/rules/KnownTypeNamesSpec.scala b/src/test/scala/sangria/validation/rules/KnownTypeNamesSpec.scala index ef704f89..2530a897 100644 --- a/src/test/scala/sangria/validation/rules/KnownTypeNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/KnownTypeNamesSpec.scala @@ -33,9 +33,9 @@ class KnownTypeNamesSpec extends WordSpec with ValidationSupport { } """, List( - "Unknown type 'JumbledUpLetters'." → Some(Pos(2, 25)), - "Unknown type 'Badger'." → Some(Pos(5, 27)), - "Unknown type 'Peettt'. Did you mean 'Pet'?" 
→ Some(Pos(8, 31)) + "Unknown type 'JumbledUpLetters'." -> Some(Pos(2, 25)), + "Unknown type 'Badger'." -> Some(Pos(5, 27)), + "Unknown type 'Peettt'. Did you mean 'Pet'?" -> Some(Pos(8, 31)) )) } } diff --git a/src/test/scala/sangria/validation/rules/LoneAnonymousOperationSpec.scala b/src/test/scala/sangria/validation/rules/LoneAnonymousOperationSpec.scala index 999dd832..e3b1337a 100644 --- a/src/test/scala/sangria/validation/rules/LoneAnonymousOperationSpec.scala +++ b/src/test/scala/sangria/validation/rules/LoneAnonymousOperationSpec.scala @@ -53,8 +53,8 @@ class LoneAnonymousOperationSpec extends WordSpec with ValidationSupport { } """, List( - "This anonymous operation must be the only defined operation." → Some(Pos(2, 9)), - "This anonymous operation must be the only defined operation." → Some(Pos(5, 9)) + "This anonymous operation must be the only defined operation." -> Some(Pos(2, 9)), + "This anonymous operation must be the only defined operation." -> Some(Pos(5, 9)) )) "anon operation with another operation" in expectFails( @@ -67,7 +67,7 @@ class LoneAnonymousOperationSpec extends WordSpec with ValidationSupport { } """, List( - "This anonymous operation must be the only defined operation." → Some(Pos(2, 9)) + "This anonymous operation must be the only defined operation." -> Some(Pos(2, 9)) )) "anon operation with another operation with subscription" in expectFails( @@ -80,7 +80,7 @@ class LoneAnonymousOperationSpec extends WordSpec with ValidationSupport { } """, List( - "This anonymous operation must be the only defined operation." → Some(Pos(2, 9)) + "This anonymous operation must be the only defined operation." 
-> Some(Pos(2, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/NoFragmentCyclesSpec.scala b/src/test/scala/sangria/validation/rules/NoFragmentCyclesSpec.scala index 79bc8e66..7a796d4c 100644 --- a/src/test/scala/sangria/validation/rules/NoFragmentCyclesSpec.scala +++ b/src/test/scala/sangria/validation/rules/NoFragmentCyclesSpec.scala @@ -52,7 +52,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragA on Human { relatives { ...fragA } }, """, List( - "Cannot spread fragment 'fragA' within itself." → Some(Pos(2, 47)) + "Cannot spread fragment 'fragA' within itself." -> Some(Pos(2, 47)) )) "no spreading itself directly" in expectFails( @@ -60,7 +60,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragA on Dog { ...fragA } """, List( - "Cannot spread fragment 'fragA' within itself." → Some(Pos(2, 33)) + "Cannot spread fragment 'fragA' within itself." -> Some(Pos(2, 33)) )) "no spreading itself directly within inline fragment" in expectFails( @@ -72,7 +72,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot spread fragment 'fragA' within itself." → Some(Pos(4, 13)) + "Cannot spread fragment 'fragA' within itself." -> Some(Pos(4, 13)) )) "no spreading itself indirectly" in expectFailsPosList( @@ -81,7 +81,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragB on Dog { ...fragA } """, List( - "Cannot spread fragment 'fragA' within itself via 'fragB'." → List(Pos(2, 33), Pos(3, 33)) + "Cannot spread fragment 'fragA' within itself via 'fragB'." -> List(Pos(2, 33), Pos(3, 33)) )) "no spreading itself indirectly reports opposite order" in expectFailsPosList( @@ -90,7 +90,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragA on Dog { ...fragB } """, List( - "Cannot spread fragment 'fragB' within itself via 'fragA'." 
→ List(Pos(2, 33), Pos(3, 33)) + "Cannot spread fragment 'fragB' within itself via 'fragA'." -> List(Pos(2, 33), Pos(3, 33)) )) "no spreading itself indirectly within inline fragment" in expectFailsPosList( @@ -107,7 +107,7 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { } """, List( - "Cannot spread fragment 'fragA' within itself via 'fragB'." → List(Pos(4, 13), Pos(9, 13)) + "Cannot spread fragment 'fragA' within itself via 'fragB'." -> List(Pos(4, 13), Pos(9, 13)) )) "no spreading itself deeply" in expectFailsPosList( @@ -122,9 +122,9 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragP on Dog { ...fragA, ...fragX } """, List( - "Cannot spread fragment 'fragA' within itself via 'fragB', 'fragC', 'fragO', 'fragP'." → + "Cannot spread fragment 'fragA' within itself via 'fragB', 'fragC', 'fragO', 'fragP'." -> List(Pos(2, 33), Pos(3, 33), Pos(4, 33), Pos(8, 33), Pos(9, 33)), - "Cannot spread fragment 'fragO' within itself via 'fragP', 'fragX', 'fragY', 'fragZ'." → + "Cannot spread fragment 'fragO' within itself via 'fragP', 'fragX', 'fragY', 'fragZ'." -> List(Pos(8, 33), Pos(9, 43), Pos(5, 33), Pos(6, 33), Pos(7, 33)) )) @@ -135,8 +135,8 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragC on Dog { ...fragA } """, List( - "Cannot spread fragment 'fragA' within itself via 'fragB'." → List(Pos(2, 33), Pos(3, 33)), - "Cannot spread fragment 'fragA' within itself via 'fragC'." → List(Pos(2, 43), Pos(4, 33)) + "Cannot spread fragment 'fragA' within itself via 'fragB'." -> List(Pos(2, 33), Pos(3, 33)), + "Cannot spread fragment 'fragA' within itself via 'fragC'." 
-> List(Pos(2, 43), Pos(4, 33)) )) "no spreading itself deeply two paths -- alt traverse order" in expectFailsPosList( @@ -146,8 +146,8 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragC on Dog { ...fragA, ...fragB } """, List( - "Cannot spread fragment 'fragA' within itself via 'fragC'." → List(Pos(2, 33), Pos(4, 33)), - "Cannot spread fragment 'fragC' within itself via 'fragB'." → List(Pos(4, 43), Pos(3, 33)) + "Cannot spread fragment 'fragA' within itself via 'fragC'." -> List(Pos(2, 33), Pos(4, 33)), + "Cannot spread fragment 'fragC' within itself via 'fragB'." -> List(Pos(4, 43), Pos(3, 33)) )) "no spreading itself deeply and immediately" in expectFailsPosList( @@ -157,9 +157,9 @@ class NoFragmentCyclesSpec extends WordSpec with ValidationSupport { fragment fragC on Dog { ...fragA, ...fragB } """, List( - "Cannot spread fragment 'fragB' within itself" → List(Pos(3, 33)), - "Cannot spread fragment 'fragA' within itself via 'fragB', 'fragC'." → List(Pos(2, 33), Pos(3, 43), Pos(4, 33)), - "Cannot spread fragment 'fragB' within itself via 'fragC'." → List(Pos(3, 43), Pos(4, 43)) + "Cannot spread fragment 'fragB' within itself" -> List(Pos(3, 33)), + "Cannot spread fragment 'fragA' within itself via 'fragB', 'fragC'." -> List(Pos(2, 33), Pos(3, 43), Pos(4, 33)), + "Cannot spread fragment 'fragB' within itself via 'fragC'." -> List(Pos(3, 43), Pos(4, 43)) )) } } diff --git a/src/test/scala/sangria/validation/rules/NoUndefinedVariablesSpec.scala b/src/test/scala/sangria/validation/rules/NoUndefinedVariablesSpec.scala index b1a0a332..9334b63d 100644 --- a/src/test/scala/sangria/validation/rules/NoUndefinedVariablesSpec.scala +++ b/src/test/scala/sangria/validation/rules/NoUndefinedVariablesSpec.scala @@ -109,7 +109,7 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$d' is not defined by operation 'Foo'." 
→ List(Pos(3, 41), Pos(2, 9)) + "Variable '$d' is not defined by operation 'Foo'." -> List(Pos(3, 41), Pos(2, 9)) )) "variable not defined by un-named query" in expectFailsPosList( @@ -119,7 +119,7 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined." → List(Pos(3, 20), Pos(2, 9)) + "Variable '$a' is not defined." -> List(Pos(3, 20), Pos(2, 9)) )) "multiple variables not defined" in expectFailsPosList( @@ -129,8 +129,8 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined by operation 'Foo'." → List(Pos(3, 20), Pos(2, 9)), - "Variable '$c' is not defined by operation 'Foo'." → List(Pos(3, 34), Pos(2, 9)) + "Variable '$a' is not defined by operation 'Foo'." -> List(Pos(3, 20), Pos(2, 9)), + "Variable '$c' is not defined by operation 'Foo'." -> List(Pos(3, 34), Pos(2, 9)) )) "variable in fragment not defined by un-named query" in expectFailsPosList( @@ -143,7 +143,7 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined." → List(Pos(6, 20), Pos(2, 9)) + "Variable '$a' is not defined." -> List(Pos(6, 20), Pos(2, 9)) )) "variable in fragment not defined by operation" in expectFailsPosList( @@ -166,7 +166,7 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$c' is not defined by operation 'Foo'." → List(Pos(16, 20), Pos(2, 9)) + "Variable '$c' is not defined by operation 'Foo'." -> List(Pos(16, 20), Pos(2, 9)) )) "multiple variables in fragments not defined" in expectFailsPosList( @@ -189,8 +189,8 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined by operation 'Foo'." → List(Pos(6, 20), Pos(2, 9)), - "Variable '$c' is not defined by operation 'Foo'." → List(Pos(16, 20), Pos(2, 9)) + "Variable '$a' is not defined by operation 'Foo'." 
-> List(Pos(6, 20), Pos(2, 9)), + "Variable '$c' is not defined by operation 'Foo'." -> List(Pos(16, 20), Pos(2, 9)) )) "single variable in fragment not defined by multiple operations" in expectFailsPosList( @@ -206,8 +206,8 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$b' is not defined by operation 'Foo'." → List(Pos(9, 27), Pos(2, 9)), - "Variable '$b' is not defined by operation 'Bar'." → List(Pos(9, 27), Pos(5, 9)) + "Variable '$b' is not defined by operation 'Foo'." -> List(Pos(9, 27), Pos(2, 9)), + "Variable '$b' is not defined by operation 'Bar'." -> List(Pos(9, 27), Pos(5, 9)) )) "variables in fragment not defined by multiple operations" in expectFailsPosList( @@ -223,8 +223,8 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined by operation 'Foo'." → List(Pos(9, 20), Pos(2, 9)), - "Variable '$b' is not defined by operation 'Bar'." → List(Pos(9, 27), Pos(5, 9)) + "Variable '$a' is not defined by operation 'Foo'." -> List(Pos(9, 20), Pos(2, 9)), + "Variable '$b' is not defined by operation 'Bar'." -> List(Pos(9, 27), Pos(5, 9)) )) "variable in fragment used by other operation" in expectFailsPosList( @@ -243,8 +243,8 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined by operation 'Foo'." → List(Pos(9, 20), Pos(2, 9)), - "Variable '$b' is not defined by operation 'Bar'." → List(Pos(12, 20), Pos(5, 9)) + "Variable '$a' is not defined by operation 'Foo'." -> List(Pos(9, 20), Pos(2, 9)), + "Variable '$b' is not defined by operation 'Bar'." -> List(Pos(12, 20), Pos(5, 9)) )) "multiple undefined variables produce multiple errors" in expectFailsPosList( @@ -265,12 +265,12 @@ class NoUndefinedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not defined by operation 'Foo'." 
→ List(Pos(9, 21), Pos(2, 9)), - "Variable '$c' is not defined by operation 'Foo'." → List(Pos(14, 21), Pos(2, 9)), - "Variable '$a' is not defined by operation 'Foo'." → List(Pos(11, 21), Pos(2, 9)), - "Variable '$b' is not defined by operation 'Bar'." → List(Pos(9, 28), Pos(5, 9)), - "Variable '$c' is not defined by operation 'Bar'." → List(Pos(14, 21), Pos(5, 9)), - "Variable '$b' is not defined by operation 'Bar'." → List(Pos(11, 28), Pos(5, 9)) + "Variable '$a' is not defined by operation 'Foo'." -> List(Pos(9, 21), Pos(2, 9)), + "Variable '$c' is not defined by operation 'Foo'." -> List(Pos(14, 21), Pos(2, 9)), + "Variable '$a' is not defined by operation 'Foo'." -> List(Pos(11, 21), Pos(2, 9)), + "Variable '$b' is not defined by operation 'Bar'." -> List(Pos(9, 28), Pos(5, 9)), + "Variable '$c' is not defined by operation 'Bar'." -> List(Pos(14, 21), Pos(5, 9)), + "Variable '$b' is not defined by operation 'Bar'." -> List(Pos(11, 28), Pos(5, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/NoUnusedFragmentsSpec.scala b/src/test/scala/sangria/validation/rules/NoUnusedFragmentsSpec.scala index dbdb37b4..025bf4e0 100644 --- a/src/test/scala/sangria/validation/rules/NoUnusedFragmentsSpec.scala +++ b/src/test/scala/sangria/validation/rules/NoUnusedFragmentsSpec.scala @@ -84,8 +84,8 @@ class NoUnusedFragmentsSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'Unused1' is not used." → Some(Pos(22, 9)), - "Fragment 'Unused2' is not used." → Some(Pos(25, 9)) + "Fragment 'Unused1' is not used." -> Some(Pos(22, 9)), + "Fragment 'Unused2' is not used." -> Some(Pos(25, 9)) )) "contains unknown fragments with ref cycle" in expectFails( @@ -120,8 +120,8 @@ class NoUnusedFragmentsSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'Unused1' is not used." → Some(Pos(22, 9)), - "Fragment 'Unused2' is not used." → Some(Pos(26, 9)) + "Fragment 'Unused1' is not used." -> Some(Pos(22, 9)), + "Fragment 'Unused2' is not used." 
-> Some(Pos(26, 9)) )) "contains unknown and undef fragments" in expectFails( @@ -136,7 +136,7 @@ class NoUnusedFragmentsSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment 'foo' is not used." → Some(Pos(7, 9)) + "Fragment 'foo' is not used." -> Some(Pos(7, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/NoUnusedVariablesSpec.scala b/src/test/scala/sangria/validation/rules/NoUnusedVariablesSpec.scala index f49ecc93..5f6e55c0 100644 --- a/src/test/scala/sangria/validation/rules/NoUnusedVariablesSpec.scala +++ b/src/test/scala/sangria/validation/rules/NoUnusedVariablesSpec.scala @@ -96,7 +96,7 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$c' is not used in operation Foo." → Some(Pos(2, 43)) + "Variable '$c' is not used in operation Foo." -> Some(Pos(2, 43)) )) "multiple variables not used" in expectFails( @@ -106,8 +106,8 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not used in operation Foo." → Some(Pos(2, 19)), - "Variable '$c' is not used in operation Foo." → Some(Pos(2, 43)) + "Variable '$a' is not used in operation Foo." -> Some(Pos(2, 19)), + "Variable '$c' is not used in operation Foo." -> Some(Pos(2, 43)) )) "variable not used in fragments" in expectFails( @@ -130,7 +130,7 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$c' is not used in operation Foo." → Some(Pos(2, 43)) + "Variable '$c' is not used in operation Foo." -> Some(Pos(2, 43)) )) "multiple variables not used 1" in expectFails( @@ -153,8 +153,8 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' is not used in operation Foo." → Some(Pos(2, 19)), - "Variable '$c' is not used in operation Foo." → Some(Pos(2, 43)) + "Variable '$a' is not used in operation Foo." -> Some(Pos(2, 19)), + "Variable '$c' is not used in operation Foo." 
-> Some(Pos(2, 43)) )) "variable not used by unreferenced fragment" in expectFails( @@ -170,7 +170,7 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$b' is not used in operation Foo." → Some(Pos(2, 19)) + "Variable '$b' is not used in operation Foo." -> Some(Pos(2, 19)) )) "variable not used by fragment used by other operation" in expectFails( @@ -189,8 +189,8 @@ class NoUnusedVariablesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$b' is not used in operation Foo." → Some(Pos(2, 19)), - "Variable '$a' is not used in operation Bar." → Some(Pos(5, 19)) + "Variable '$b' is not used in operation Foo." -> Some(Pos(2, 19)), + "Variable '$a' is not used in operation Bar." -> Some(Pos(5, 19)) )) } } diff --git a/src/test/scala/sangria/validation/rules/OverlappingFieldsCanBeMergedSpec.scala b/src/test/scala/sangria/validation/rules/OverlappingFieldsCanBeMergedSpec.scala index df12a528..eda381c1 100644 --- a/src/test/scala/sangria/validation/rules/OverlappingFieldsCanBeMergedSpec.scala +++ b/src/test/scala/sangria/validation/rules/OverlappingFieldsCanBeMergedSpec.scala @@ -100,7 +100,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'fido' conflict because 'name' and 'nickname' are different fields." → List(Pos(3, 11), Pos(4, 11)) + "Field 'fido' conflict because 'name' and 'nickname' are different fields." -> List(Pos(3, 11), Pos(4, 11)) )) "Same aliases allowed on non-overlapping fields" in expectPasses( @@ -123,7 +123,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'name' conflict because 'nickname' and 'name' are different fields." → List(Pos(3, 11), Pos(4, 11)) + "Field 'name' conflict because 'nickname' and 'name' are different fields." 
-> List(Pos(3, 11), Pos(4, 11)) )) "different args, second adds an argument" in expectFailsPosList( @@ -134,7 +134,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'doesKnowCommand' conflict because they have differing arguments." → List(Pos(3, 11), Pos(4, 11)) + "Field 'doesKnowCommand' conflict because they have differing arguments." -> List(Pos(3, 11), Pos(4, 11)) )) "different args, second missing an argument" in expectFailsPosList( @@ -145,7 +145,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'doesKnowCommand' conflict because they have differing arguments." → List(Pos(3, 11), Pos(4, 11)) + "Field 'doesKnowCommand' conflict because they have differing arguments." -> List(Pos(3, 11), Pos(4, 11)) )) "conflicting args" in expectFailsPosList( @@ -156,7 +156,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'doesKnowCommand' conflict because they have differing arguments." → List(Pos(3, 11), Pos(4, 11)) + "Field 'doesKnowCommand' conflict because they have differing arguments." -> List(Pos(3, 11), Pos(4, 11)) )) "allows different args where no conflict is possible" in expectPasses( @@ -185,7 +185,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'x' conflict because 'a' and 'b' are different fields." → List(Pos(7, 11), Pos(10, 11)) + "Field 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(7, 11), Pos(10, 11)) )) "reports each conflict once" in expectFailsPosList( @@ -213,9 +213,9 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'x' conflict because 'a' and 'b' are different fields." → List(Pos(18, 11), Pos(21, 11)), - "Field 'x' conflict because 'c' and 'a' are different fields." 
→ List(Pos(14, 13), Pos(18, 11)), - "Field 'x' conflict because 'c' and 'b' are different fields." → List(Pos(14, 13), Pos(21, 11)) + "Field 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(18, 11), Pos(21, 11)), + "Field 'x' conflict because 'c' and 'a' are different fields." -> List(Pos(14, 13), Pos(18, 11)), + "Field 'x' conflict because 'c' and 'b' are different fields." -> List(Pos(14, 13), Pos(21, 11)) )) "deep conflict" in expectFailsPosList( @@ -230,7 +230,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'field' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." → List(Pos(3, 11), Pos(4, 13), Pos(6, 11), Pos(7, 13)) + "Field 'field' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(3, 11), Pos(4, 13), Pos(6, 11), Pos(7, 13)) )) "deep conflict with multiple issues" in expectFailsPosList( @@ -247,7 +247,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'field' conflict because subfields 'y' conflict because 'c' and 'd' are different fields and subfields 'x' conflict because 'a' and 'b' are different fields." → + "Field 'field' conflict because subfields 'y' conflict because 'c' and 'd' are different fields and subfields 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(3, 11), Pos(5, 13), Pos(4, 13), Pos(7, 11), Pos(9, 13), Pos(8, 13)) )) @@ -267,7 +267,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'field' conflict because subfields 'deepField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." → + "Field 'field' conflict because subfields 'deepField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." 
-> List(Pos(3, 11), Pos(4, 13), Pos(5, 15), Pos(8, 11), Pos(9, 13), Pos(10, 15)) )) @@ -290,7 +290,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'deepField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." → + "Field 'deepField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(4, 13), Pos(5, 15), Pos(7, 13), Pos(8, 15)) )) @@ -321,7 +321,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'deeperField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." → + "Field 'deeperField' conflict because subfields 'x' conflict because 'a' and 'b' are different fields." -> List(Pos(12, 13), Pos(13, 15), Pos(15, 13), Pos(16, 15)) )) @@ -351,7 +351,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'field' conflict because subfields 'x' conflict because 'a' and 'b' are different fields and subfields 'y' conflict because 'c' and 'd' are different fields." → + "Field 'field' conflict because subfields 'x' conflict because 'a' and 'b' are different fields and subfields 'y' conflict because 'c' and 'd' are different fields." 
-> List(Pos(3, 11), Pos(11, 11), Pos(15, 11), Pos(6, 11), Pos(22, 11), Pos(18, 11)) )) @@ -370,47 +370,47 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { """) "return types must be unambiguous" should { - lazy val SomeBox: InterfaceType[Unit, Unit] = InterfaceType("SomeBox", () ⇒ fields[Unit, Unit]( - Field("deepBox", OptionType(SomeBox), resolve = _ ⇒ None), - Field("unrelatedField", OptionType(StringType), resolve = _ ⇒ None) + lazy val SomeBox: InterfaceType[Unit, Unit] = InterfaceType("SomeBox", () => fields[Unit, Unit]( + Field("deepBox", OptionType(SomeBox), resolve = _ => None), + Field("unrelatedField", OptionType(StringType), resolve = _ => None) )) - lazy val StringBox: ObjectType[Unit, Unit] = ObjectType("StringBox", interfaces[Unit, Unit](SomeBox), () ⇒ fields[Unit, Unit]( - Field("unrelatedField", OptionType(StringType), resolve = _ ⇒ None), - Field("deepBox", OptionType(StringBox), resolve = _ ⇒ None), - Field("scalar", OptionType(StringType), resolve = _ ⇒ None), - Field("listStringBox", OptionType(ListType(OptionType(StringBox))), resolve = _ ⇒ None), - Field("stringBox", OptionType(StringBox), resolve = _ ⇒ None), - Field("intBox", OptionType(IntBox), resolve = _ ⇒ None) + lazy val StringBox: ObjectType[Unit, Unit] = ObjectType("StringBox", interfaces[Unit, Unit](SomeBox), () => fields[Unit, Unit]( + Field("unrelatedField", OptionType(StringType), resolve = _ => None), + Field("deepBox", OptionType(StringBox), resolve = _ => None), + Field("scalar", OptionType(StringType), resolve = _ => None), + Field("listStringBox", OptionType(ListType(OptionType(StringBox))), resolve = _ => None), + Field("stringBox", OptionType(StringBox), resolve = _ => None), + Field("intBox", OptionType(IntBox), resolve = _ => None) )) - lazy val IntBox: ObjectType[Unit, Unit] = ObjectType("IntBox", interfaces[Unit, Unit](SomeBox), () ⇒ fields[Unit, Unit]( - Field("unrelatedField", OptionType(StringType), resolve = _ ⇒ None), - 
Field("deepBox", OptionType(IntBox), resolve = _ ⇒ None), - Field("scalar", OptionType(IntType), resolve = _ ⇒ None), - Field("listStringBox", OptionType(ListType(OptionType(StringBox))), resolve = _ ⇒ None), - Field("stringBox", OptionType(StringBox), resolve = _ ⇒ None), - Field("intBox", OptionType(IntBox), resolve = _ ⇒ None) + lazy val IntBox: ObjectType[Unit, Unit] = ObjectType("IntBox", interfaces[Unit, Unit](SomeBox), () => fields[Unit, Unit]( + Field("unrelatedField", OptionType(StringType), resolve = _ => None), + Field("deepBox", OptionType(IntBox), resolve = _ => None), + Field("scalar", OptionType(IntType), resolve = _ => None), + Field("listStringBox", OptionType(ListType(OptionType(StringBox))), resolve = _ => None), + Field("stringBox", OptionType(StringBox), resolve = _ => None), + Field("intBox", OptionType(IntBox), resolve = _ => None) )) val NonNullStringBox1 = InterfaceType("NonNullStringBox1", fields[Unit, Unit]( - Field("scalar", StringType, resolve = _ ⇒ "") + Field("scalar", StringType, resolve = _ => "") )) val NonNullStringBox1Impl = ObjectType("NonNullStringBox1Impl", interfaces[Unit, Unit](SomeBox, NonNullStringBox1), fields[Unit, Unit]( - Field("scalar", StringType, resolve = _ ⇒ ""), - Field("unrelatedField", OptionType(StringType), resolve = _ ⇒ None), - Field("deepBox", OptionType(SomeBox), resolve = _ ⇒ None) + Field("scalar", StringType, resolve = _ => ""), + Field("unrelatedField", OptionType(StringType), resolve = _ => None), + Field("deepBox", OptionType(SomeBox), resolve = _ => None) )) val NonNullStringBox2 = InterfaceType("NonNullStringBox2", fields[Unit, Unit]( - Field("scalar", StringType, resolve = _ ⇒ "") + Field("scalar", StringType, resolve = _ => "") )) val NonNullStringBox2Impl = ObjectType("NonNullStringBox2Impl", interfaces[Unit, Unit](SomeBox, NonNullStringBox2), fields[Unit, Unit]( - Field("scalar", StringType, resolve = _ ⇒ ""), - Field("unrelatedField", OptionType(StringType), resolve = _ ⇒ None), - 
Field("deepBox", OptionType(SomeBox), resolve = _ ⇒ None) + Field("scalar", StringType, resolve = _ => ""), + Field("unrelatedField", OptionType(StringType), resolve = _ => None), + Field("deepBox", OptionType(SomeBox), resolve = _ => None) )) val Connection = ObjectType("Connection", fields[Unit, Unit]( @@ -418,17 +418,17 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { ObjectType("Edge", fields[Unit, Unit]( Field("node", OptionType( ObjectType("Node", fields[Unit, Unit]( - Field("id", OptionType(IDType), resolve = _ ⇒ ""), - Field("name", OptionType(StringType), resolve = _ ⇒ "") + Field("id", OptionType(IDType), resolve = _ => ""), + Field("name", OptionType(StringType), resolve = _ => "") )) - ), resolve = _ ⇒ ()) + ), resolve = _ => ()) )) - ))), resolve = _ ⇒ Nil) + ))), resolve = _ => Nil) )) val schema = Schema(ObjectType("QueryRoot", fields[Unit, Unit]( - Field("someBox", OptionType(SomeBox), resolve = _ ⇒ ()), - Field("connection", OptionType(Connection), resolve = _ ⇒ ()) + Field("someBox", OptionType(SomeBox), resolve = _ => ()), + Field("connection", OptionType(Connection), resolve = _ => ()) )), additionalTypes = IntBox :: StringBox :: NonNullStringBox1 :: NonNullStringBox1Impl :: NonNullStringBox2 :: NonNullStringBox2Impl :: Nil) // This is invalid since an object could potentially be both the Object @@ -449,7 +449,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'scalar' conflict because they return conflicting types 'Int' and 'String!'." → + "Field 'scalar' conflict because they return conflicting types 'Int' and 'String!'." -> List(Pos(5, 17), Pos(8, 17)) )) @@ -503,7 +503,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'edges' conflict because subfields 'node' conflict because subfields 'id' conflict because 'name' and 'id' are different fields." 
→ + "Field 'edges' conflict because subfields 'node' conflict because subfields 'id' conflict because 'name' and 'id' are different fields." -> List(Pos(5, 15), Pos(6, 17), Pos(7, 19), Pos(14, 13), Pos(15, 15), Pos(16, 17)) )) @@ -555,7 +555,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } } """, - List("Field 'scalar' conflict because they return conflicting types 'Int' and 'String'" → List(Pos(5, 17), Pos(8, 17)))) + List("Field 'scalar' conflict because they return conflicting types 'Int' and 'String'" -> List(Pos(5, 17), Pos(8, 17)))) "reports correctly when a non-exclusive follows an exclusive" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, """ @@ -602,7 +602,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { scalar: unrelatedField } """, - List("Field 'other' conflict because subfields 'scalar' conflict because 'scalar' and 'unrelatedField' are different fields." → + List("Field 'other' conflict because subfields 'scalar' conflict because 'scalar' and 'unrelatedField' are different fields." -> List(Pos(31, 13), Pos(39, 13), Pos(34, 13), Pos(42, 13)))) "disallows differing return type nullability despite no overlap" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, @@ -618,7 +618,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } } """, - List("Field 'scalar' conflict because they return conflicting types 'String!' and 'String'" → List(Pos(5, 17), Pos(8, 17)))) + List("Field 'scalar' conflict because they return conflicting types 'String!' 
and 'String'" -> List(Pos(5, 17), Pos(8, 17)))) "disallows differing return type list despite no overlap" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, """ @@ -637,7 +637,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } } """, - List("Field 'box' conflict because they return conflicting types '[StringBox]' and 'StringBox'" → List(Pos(5, 17), Pos(10, 17)))) + List("Field 'box' conflict because they return conflicting types '[StringBox]' and 'StringBox'" -> List(Pos(5, 17), Pos(10, 17)))) "disallows differing return type list despite no overlap (reverse)" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, """ @@ -656,7 +656,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } } """, - List("Field 'box' conflict because they return conflicting types 'StringBox' and '[StringBox]'" → List(Pos(5, 17), Pos(10, 17)))) + List("Field 'box' conflict because they return conflicting types 'StringBox' and '[StringBox]'" -> List(Pos(5, 17), Pos(10, 17)))) "disallows differing subfields" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, """ @@ -677,7 +677,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'val' conflict because 'scalar' and 'unrelatedField' are different fields." → List(Pos(6, 19), Pos(7, 19)))) + "Field 'val' conflict because 'scalar' and 'unrelatedField' are different fields." 
-> List(Pos(6, 19), Pos(7, 19)))) "disallows differing deep return types despite no overlap" in expectInvalid(schema, new OverlappingFieldsCanBeMerged :: Nil, """ @@ -696,7 +696,7 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } } """, - List("Field 'box' conflict because subfields 'scalar' conflict because they return conflicting types 'String' and 'Int'" → + List("Field 'box' conflict because subfields 'scalar' conflict because they return conflicting types 'String' and 'Int'" -> List(Pos(5, 17), Pos(6, 19), Pos(10, 17), Pos(11, 19)))) } @@ -736,8 +736,8 @@ class OverlappingFieldsCanBeMergedSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'fido' conflict because 'name' and 'nickname' are different fields" → List(Pos(4, 10), Pos(5, 10)), - "Field 'fido' conflict because 'name' and 'nickname' are different fields" → List(Pos(4, 10), Pos(5, 10)), - "Field 'fido' conflict because 'name' and 'nickname' are different fields" → List(Pos(4, 10), Pos(5, 10)))) + "Field 'fido' conflict because 'name' and 'nickname' are different fields" -> List(Pos(4, 10), Pos(5, 10)), + "Field 'fido' conflict because 'name' and 'nickname' are different fields" -> List(Pos(4, 10), Pos(5, 10)), + "Field 'fido' conflict because 'name' and 'nickname' are different fields" -> List(Pos(4, 10), Pos(5, 10)))) } } diff --git a/src/test/scala/sangria/validation/rules/PossibleFragmentSpreadsSpec.scala b/src/test/scala/sangria/validation/rules/PossibleFragmentSpreadsSpec.scala index 0228edbc..485bcdc9 100644 --- a/src/test/scala/sangria/validation/rules/PossibleFragmentSpreadsSpec.scala +++ b/src/test/scala/sangria/validation/rules/PossibleFragmentSpreadsSpec.scala @@ -72,7 +72,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment dogFragment on Dog { barkVolume } """, List( - "Fragment 'dogFragment' cannot be spread here as objects of type 'Cat' can never be of type 'Dog'." 
→ Some(Pos(2, 53)) + "Fragment 'dogFragment' cannot be spread here as objects of type 'Cat' can never be of type 'Dog'." -> Some(Pos(2, 53)) )) "different object into object in inline fragment" in expectFails( @@ -82,7 +82,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment cannot be spread here as objects of type 'Cat' can never be of type 'Dog'." → Some(Pos(3, 11)) + "Fragment cannot be spread here as objects of type 'Cat' can never be of type 'Dog'." -> Some(Pos(3, 11)) )) "object into not implementing interface" in expectFails( @@ -91,7 +91,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment humanFragment on Human { pets { name } } """, List( - "Fragment 'humanFragment' cannot be spread here as objects of type 'Pet' can never be of type 'Human'." → Some(Pos(2, 56)) + "Fragment 'humanFragment' cannot be spread here as objects of type 'Pet' can never be of type 'Human'." -> Some(Pos(2, 56)) )) "object into not containing union" in expectFails( @@ -100,7 +100,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment humanFragment on Human { pets { name } } """, List( - "Fragment 'humanFragment' cannot be spread here as objects of type 'CatOrDog' can never be of type 'Human'." → Some(Pos(2, 57)) + "Fragment 'humanFragment' cannot be spread here as objects of type 'CatOrDog' can never be of type 'Human'." -> Some(Pos(2, 57)) )) "union into not contained object" in expectFails( @@ -109,7 +109,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment catOrDogFragment on CatOrDog { __typename } """, List( - "Fragment 'catOrDogFragment' cannot be spread here as objects of type 'Human' can never be of type 'CatOrDog'." → Some(Pos(2, 54)) + "Fragment 'catOrDogFragment' cannot be spread here as objects of type 'Human' can never be of type 'CatOrDog'." 
-> Some(Pos(2, 54)) )) "union into non overlapping interface" in expectFails( @@ -118,7 +118,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment humanOrAlienFragment on HumanOrAlien { __typename } """, List( - "Fragment 'humanOrAlienFragment' cannot be spread here as objects of type 'Pet' can never be of type 'HumanOrAlien'." → Some(Pos(2, 55)) + "Fragment 'humanOrAlienFragment' cannot be spread here as objects of type 'Pet' can never be of type 'HumanOrAlien'." -> Some(Pos(2, 55)) )) "union into non overlapping union" in expectFails( @@ -127,7 +127,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment humanOrAlienFragment on HumanOrAlien { __typename } """, List( - "Fragment 'humanOrAlienFragment' cannot be spread here as objects of type 'CatOrDog' can never be of type 'HumanOrAlien'." → Some(Pos(2, 56)) + "Fragment 'humanOrAlienFragment' cannot be spread here as objects of type 'CatOrDog' can never be of type 'HumanOrAlien'." -> Some(Pos(2, 56)) )) "interface into non implementing object" in expectFails( @@ -136,7 +136,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment intelligentFragment on Intelligent { iq } """, List( - "Fragment 'intelligentFragment' cannot be spread here as objects of type 'Cat' can never be of type 'Intelligent'." → Some(Pos(2, 56)) + "Fragment 'intelligentFragment' cannot be spread here as objects of type 'Cat' can never be of type 'Intelligent'." -> Some(Pos(2, 56)) )) "interface into non overlapping interface" in expectFails( @@ -147,7 +147,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment intelligentFragment on Intelligent { iq } """, List( - "Fragment 'intelligentFragment' cannot be spread here as objects of type 'Pet' can never be of type 'Intelligent'." 
→ Some(Pos(3, 11)) + "Fragment 'intelligentFragment' cannot be spread here as objects of type 'Pet' can never be of type 'Intelligent'." -> Some(Pos(3, 11)) )) "interface into non overlapping interface in inline fragment" in expectFails( @@ -157,7 +157,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { } """, List( - "Fragment cannot be spread here as objects of type 'Pet' can never be of type 'Intelligent'." → Some(Pos(3, 11)) + "Fragment cannot be spread here as objects of type 'Pet' can never be of type 'Intelligent'." -> Some(Pos(3, 11)) )) "interface into non overlapping union" in expectFails( @@ -166,7 +166,7 @@ class PossibleFragmentSpreadsSpec extends WordSpec with ValidationSupport { fragment petFragment on Pet { name } """, List( - "Fragment 'petFragment' cannot be spread here as objects of type 'HumanOrAlien' can never be of type 'Pet'." → Some(Pos(2, 64)) + "Fragment 'petFragment' cannot be spread here as objects of type 'HumanOrAlien' can never be of type 'Pet'." -> Some(Pos(2, 64)) )) } } diff --git a/src/test/scala/sangria/validation/rules/ProvidedRequiredArgumentsSpec.scala b/src/test/scala/sangria/validation/rules/ProvidedRequiredArgumentsSpec.scala index 3815220e..5ab0e2f2 100644 --- a/src/test/scala/sangria/validation/rules/ProvidedRequiredArgumentsSpec.scala +++ b/src/test/scala/sangria/validation/rules/ProvidedRequiredArgumentsSpec.scala @@ -119,7 +119,7 @@ class ProvidedRequiredArgumentsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'multipleReqs' argument 'req1' of type 'Int!' is required but not provided." → Some(Pos(4, 15)) + "Field 'multipleReqs' argument 'req1' of type 'Int!' is required but not provided." -> Some(Pos(4, 15)) )) "Missing multiple non-nullable arguments" in expectFails( @@ -131,8 +131,8 @@ class ProvidedRequiredArgumentsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'multipleReqs' argument 'req1' of type 'Int!' is required but not provided." 
→ Some(Pos(4, 15)), - "Field 'multipleReqs' argument 'req2' of type 'Int!' is required but not provided." → Some(Pos(4, 15)) + "Field 'multipleReqs' argument 'req1' of type 'Int!' is required but not provided." -> Some(Pos(4, 15)), + "Field 'multipleReqs' argument 'req2' of type 'Int!' is required but not provided." -> Some(Pos(4, 15)) )) "Incorrect value and missing argument" in expectFails( @@ -144,7 +144,7 @@ class ProvidedRequiredArgumentsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'multipleReqs' argument 'req2' of type 'Int!' is required but not provided." → Some(Pos(4, 15)) + "Field 'multipleReqs' argument 'req2' of type 'Int!' is required but not provided." -> Some(Pos(4, 15)) )) } @@ -177,8 +177,8 @@ class ProvidedRequiredArgumentsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'include' argument 'if' of type 'Boolean!' is required but not provided." → Some(Pos(3, 17)), - "Field 'skip' argument 'if' of type 'Boolean!' is required but not provided." → Some(Pos(4, 20)) + "Field 'include' argument 'if' of type 'Boolean!' is required but not provided." -> Some(Pos(3, 17)), + "Field 'skip' argument 'if' of type 'Boolean!' is required but not provided." -> Some(Pos(4, 20)) )) } } diff --git a/src/test/scala/sangria/validation/rules/ScalarLeafsSpec.scala b/src/test/scala/sangria/validation/rules/ScalarLeafsSpec.scala index b33d1d22..0da5b6d2 100644 --- a/src/test/scala/sangria/validation/rules/ScalarLeafsSpec.scala +++ b/src/test/scala/sangria/validation/rules/ScalarLeafsSpec.scala @@ -22,7 +22,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'human' of type 'Human' must have a sub selection." → Some(Pos(3, 11)) + "Field 'human' of type 'Human' must have a sub selection." 
-> Some(Pos(3, 11)) )) "interface type missing selection" in expectFails( @@ -32,7 +32,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'pets' of type '[Pet]' must have a sub selection." → Some(Pos(3, 19)) + "Field 'pets' of type '[Pet]' must have a sub selection." -> Some(Pos(3, 19)) )) "valid scalar selection with args" in expectPasses( @@ -49,7 +49,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'barks' of type 'Boolean' must not have a sub selection." → Some(Pos(3, 11)) + "Field 'barks' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11)) )) "scalar selection not allowed on Enum" in expectFails( @@ -59,7 +59,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'furColor' of type 'FurColor' must not have a sub selection." → Some(Pos(3, 11)) + "Field 'furColor' of type 'FurColor' must not have a sub selection." -> Some(Pos(3, 11)) )) "scalar selection not allowed with args" in expectFails( @@ -69,7 +69,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." → Some(Pos(3, 11)) + "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11)) )) "Scalar selection not allowed with directives" in expectFails( @@ -79,7 +79,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'name' of type 'String' must not have a sub selection." → Some(Pos(3, 11)) + "Field 'name' of type 'String' must not have a sub selection." -> Some(Pos(3, 11)) )) "Scalar selection not allowed with directives and args" in expectFails( @@ -89,7 +89,7 @@ class ScalarLeafsSpec extends WordSpec with ValidationSupport { } """, List( - "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." 
→ Some(Pos(3, 11)) + "Field 'doesKnowCommand' of type 'Boolean' must not have a sub selection." -> Some(Pos(3, 11)) )) } } diff --git a/src/test/scala/sangria/validation/rules/SingleFieldSubscriptionsSpec.scala b/src/test/scala/sangria/validation/rules/SingleFieldSubscriptionsSpec.scala index f9930e21..3458318a 100644 --- a/src/test/scala/sangria/validation/rules/SingleFieldSubscriptionsSpec.scala +++ b/src/test/scala/sangria/validation/rules/SingleFieldSubscriptionsSpec.scala @@ -23,7 +23,7 @@ class SingleFieldSubscriptionsSpec extends WordSpec with ValidationSupport { } """, List( - "Subscription 'ImportantEmails' must select only one top level field." → Some(Pos(4, 11)) + "Subscription 'ImportantEmails' must select only one top level field." -> Some(Pos(4, 11)) )) "fails with more than one root field including introspection" in expectFails( @@ -34,7 +34,7 @@ class SingleFieldSubscriptionsSpec extends WordSpec with ValidationSupport { } """, List( - "Subscription 'ImportantEmails' must select only one top level field." → Some(Pos(4, 11)) + "Subscription 'ImportantEmails' must select only one top level field." -> Some(Pos(4, 11)) )) "fails with many more than one root field" in expectFailsPosList( @@ -46,7 +46,7 @@ class SingleFieldSubscriptionsSpec extends WordSpec with ValidationSupport { } """, List( - "Subscription 'ImportantEmails' must select only one top level field." → List(Pos(4, 11), Pos(5, 11)) + "Subscription 'ImportantEmails' must select only one top level field." -> List(Pos(4, 11), Pos(5, 11)) )) "fails with more than one root field in anonymous subscriptions" in expectFailsPosList( @@ -57,7 +57,7 @@ class SingleFieldSubscriptionsSpec extends WordSpec with ValidationSupport { } """, List( - "Anonymous Subscription must select only one top level field." → List(Pos(4, 11)) + "Anonymous Subscription must select only one top level field." 
-> List(Pos(4, 11)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueArgumentNamesSpec.scala b/src/test/scala/sangria/validation/rules/UniqueArgumentNamesSpec.scala index ad4236fc..e704de7a 100644 --- a/src/test/scala/sangria/validation/rules/UniqueArgumentNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueArgumentNamesSpec.scala @@ -79,7 +79,7 @@ class UniqueArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one argument named 'arg1'." → Some(Pos(3, 32)) + "There can be only one argument named 'arg1'." -> Some(Pos(3, 32)) )) "many duplicate field arguments" in expectFails( @@ -89,8 +89,8 @@ class UniqueArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one argument named 'arg1'." → Some(Pos(3, 32)), - "There can be only one argument named 'arg1'." → Some(Pos(3, 47)) + "There can be only one argument named 'arg1'." -> Some(Pos(3, 32)), + "There can be only one argument named 'arg1'." -> Some(Pos(3, 47)) )) "duplicate directive arguments" in expectFails( @@ -100,7 +100,7 @@ class UniqueArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one argument named 'arg1'." → Some(Pos(3, 43)) + "There can be only one argument named 'arg1'." -> Some(Pos(3, 43)) )) "many duplicate directive arguments" in expectFails( @@ -110,8 +110,8 @@ class UniqueArgumentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one argument named 'arg1'." → Some(Pos(3, 43)), - "There can be only one argument named 'arg1'." → Some(Pos(3, 58)) + "There can be only one argument named 'arg1'." -> Some(Pos(3, 43)), + "There can be only one argument named 'arg1'." 
-> Some(Pos(3, 58)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueDirectivesPerLocationSpec.scala b/src/test/scala/sangria/validation/rules/UniqueDirectivesPerLocationSpec.scala index 1c9e1e03..074407cb 100644 --- a/src/test/scala/sangria/validation/rules/UniqueDirectivesPerLocationSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueDirectivesPerLocationSpec.scala @@ -51,7 +51,7 @@ class UniqueDirectivesPerLocationSpec extends WordSpec with ValidationSupport { } """, List( - "The directive 'directive' can only be used once at this location." → List(Pos(3, 17), Pos(3, 28)) + "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28)) )) "many duplicate directives in one location" in expectFailsPosList( @@ -61,8 +61,8 @@ class UniqueDirectivesPerLocationSpec extends WordSpec with ValidationSupport { } """, List( - "The directive 'directive' can only be used once at this location." → List(Pos(3, 17), Pos(3, 28)), - "The directive 'directive' can only be used once at this location." → List(Pos(3, 17), Pos(3, 39)) + "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28)), + "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 39)) )) "different duplicate directives in one location" in expectFailsPosList( @@ -72,8 +72,8 @@ class UniqueDirectivesPerLocationSpec extends WordSpec with ValidationSupport { } """, List( - "The directive 'directiveA' can only be used once at this location." → List(Pos(3, 17), Pos(3, 41)), - "The directive 'directiveB' can only be used once at this location." → List(Pos(3, 29), Pos(3, 53)) + "The directive 'directiveA' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 41)), + "The directive 'directiveB' can only be used once at this location." 
-> List(Pos(3, 29), Pos(3, 53)) )) "duplicate directives in many locations" in expectFailsPosList( @@ -83,8 +83,8 @@ class UniqueDirectivesPerLocationSpec extends WordSpec with ValidationSupport { } """, List( - "The directive 'directive' can only be used once at this location." → List(Pos(2, 31), Pos(2, 42)), - "The directive 'directive' can only be used once at this location." → List(Pos(3, 17), Pos(3, 28)) + "The directive 'directive' can only be used once at this location." -> List(Pos(2, 31), Pos(2, 42)), + "The directive 'directive' can only be used once at this location." -> List(Pos(3, 17), Pos(3, 28)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueFragmentNamesSpec.scala b/src/test/scala/sangria/validation/rules/UniqueFragmentNamesSpec.scala index 2e5b3984..6f9a4ecd 100644 --- a/src/test/scala/sangria/validation/rules/UniqueFragmentNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueFragmentNamesSpec.scala @@ -79,7 +79,7 @@ class UniqueFragmentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can only be one fragment named 'fragA'." → Some(Pos(8, 9)) + "There can only be one fragment named 'fragA'." -> Some(Pos(8, 9)) )) "fragments named the same without being referenced" in expectFails( @@ -92,7 +92,7 @@ class UniqueFragmentNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can only be one fragment named 'fragA'." → Some(Pos(5, 9)) + "There can only be one fragment named 'fragA'." 
-> Some(Pos(5, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueInputFieldNamesSpec.scala b/src/test/scala/sangria/validation/rules/UniqueInputFieldNamesSpec.scala index 8951db4b..61ba867c 100644 --- a/src/test/scala/sangria/validation/rules/UniqueInputFieldNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueInputFieldNamesSpec.scala @@ -43,7 +43,7 @@ class UniqueInputFieldNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one input field named 'f1'." → List(Pos(3, 24), Pos(3, 37)) + "There can be only one input field named 'f1'." -> List(Pos(3, 24), Pos(3, 37)) )) "many duplicate input object fields" in expectFailsPosList( @@ -53,8 +53,8 @@ class UniqueInputFieldNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can be only one input field named 'f1'." → List(Pos(3, 24), Pos(3, 37)), - "There can be only one input field named 'f1'." → List(Pos(3, 24), Pos(3, 50)) + "There can be only one input field named 'f1'." -> List(Pos(3, 24), Pos(3, 37)), + "There can be only one input field named 'f1'." -> List(Pos(3, 24), Pos(3, 50)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueOperationNamesSpec.scala b/src/test/scala/sangria/validation/rules/UniqueOperationNamesSpec.scala index 47a0848a..40daf339 100644 --- a/src/test/scala/sangria/validation/rules/UniqueOperationNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueOperationNamesSpec.scala @@ -75,7 +75,7 @@ class UniqueOperationNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can only be one operation named 'Foo'." → Some(Pos(5, 9)) + "There can only be one operation named 'Foo'." -> Some(Pos(5, 9)) )) "multiple operations of same name of different types (mutation)" in expectFails( @@ -88,7 +88,7 @@ class UniqueOperationNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can only be one operation named 'Foo'." 
→ Some(Pos(5, 9)) + "There can only be one operation named 'Foo'." -> Some(Pos(5, 9)) )) "multiple operations of same name of different types (subscription)" in expectFails( @@ -101,7 +101,7 @@ class UniqueOperationNamesSpec extends WordSpec with ValidationSupport { } """, List( - "There can only be one operation named 'Foo'." → Some(Pos(5, 9)) + "There can only be one operation named 'Foo'." -> Some(Pos(5, 9)) )) } } diff --git a/src/test/scala/sangria/validation/rules/UniqueVariableNamesSpec.scala b/src/test/scala/sangria/validation/rules/UniqueVariableNamesSpec.scala index 5f90c10a..6fdad9ca 100644 --- a/src/test/scala/sangria/validation/rules/UniqueVariableNamesSpec.scala +++ b/src/test/scala/sangria/validation/rules/UniqueVariableNamesSpec.scala @@ -21,10 +21,10 @@ class UniqueVariableNamesSpec extends WordSpec with ValidationSupport { query C($x: Int, $x: Int) { __typename } """, List( - "There can be only one variable named 'x'." → List(Pos(2, 17), Pos(2, 26)), - "There can be only one variable named 'x'." → List(Pos(2, 17), Pos(2, 35)), - "There can be only one variable named 'x'." → List(Pos(3, 17), Pos(3, 29)), - "There can be only one variable named 'x'." → List(Pos(4, 17), Pos(4, 26)) + "There can be only one variable named 'x'." -> List(Pos(2, 17), Pos(2, 26)), + "There can be only one variable named 'x'." -> List(Pos(2, 17), Pos(2, 35)), + "There can be only one variable named 'x'." -> List(Pos(3, 17), Pos(3, 29)), + "There can be only one variable named 'x'." 
-> List(Pos(4, 17), Pos(4, 26)) )) } } diff --git a/src/test/scala/sangria/validation/rules/ValuesOfCorrectTypeSpec.scala b/src/test/scala/sangria/validation/rules/ValuesOfCorrectTypeSpec.scala index 8120b85f..39eb7f39 100644 --- a/src/test/scala/sangria/validation/rules/ValuesOfCorrectTypeSpec.scala +++ b/src/test/scala/sangria/validation/rules/ValuesOfCorrectTypeSpec.scala @@ -129,7 +129,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'String', found '1'." → Some(Pos(4, 41)))) + "Expected type 'String', found '1'." -> Some(Pos(4, 41)))) "Float into String" in expectFails( """ @@ -140,7 +140,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'String', found '1.0'." → Some(Pos(4, 41)))) + "Expected type 'String', found '1.0'." -> Some(Pos(4, 41)))) "Boolean into String" in expectFails( """ @@ -151,7 +151,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'String', found 'true'." → Some(Pos(4, 41)))) + "Expected type 'String', found 'true'." -> Some(Pos(4, 41)))) "Unquoted String into String" in expectFails( """ @@ -162,7 +162,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'String', found 'BAR'." → Some(Pos(4, 41)))) + "Expected type 'String', found 'BAR'." -> Some(Pos(4, 41)))) } "Invalid Int values" should { @@ -175,7 +175,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Int', found '\"3\"'." → Some(Pos(4, 35)))) + "Expected type 'Int', found '\"3\"'." -> Some(Pos(4, 35)))) "Big Int into Int" in expectFails( """ @@ -186,7 +186,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Int', found '829384293849283498239482938'." → Some(Pos(4, 35)))) + "Expected type 'Int', found '829384293849283498239482938'." 
-> Some(Pos(4, 35)))) "Unquoted String into Int" in expectFails( """ @@ -197,7 +197,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Int', found 'FOO'." → Some(Pos(4, 35)))) + "Expected type 'Int', found 'FOO'." -> Some(Pos(4, 35)))) "Simple Float into Int" in expectFails( """ @@ -208,7 +208,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Int', found '3.0'." → Some(Pos(4, 35)))) + "Expected type 'Int', found '3.0'." -> Some(Pos(4, 35)))) "Float into Int" in expectFails( """ @@ -219,7 +219,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Int', found '3.333'." → Some(Pos(4, 35)))) + "Expected type 'Int', found '3.333'." -> Some(Pos(4, 35)))) } "Invalid Float values" should { @@ -232,7 +232,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Float', found '\"3.333\"'." → Some(Pos(4, 39)))) + "Expected type 'Float', found '\"3.333\"'." -> Some(Pos(4, 39)))) "Boolean into Float" in expectFails( """ @@ -243,7 +243,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Float', found 'true'." → Some(Pos(4, 39)))) + "Expected type 'Float', found 'true'." -> Some(Pos(4, 39)))) "Unquoted into Float" in expectFails( """ @@ -254,7 +254,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Float', found 'FOO'." → Some(Pos(4, 39)))) + "Expected type 'Float', found 'FOO'." -> Some(Pos(4, 39)))) } "Invalid Boolean value" should { @@ -267,7 +267,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Boolean', found '2'." → Some(Pos(4, 43)))) + "Expected type 'Boolean', found '2'." 
-> Some(Pos(4, 43)))) "Float into Boolean" in expectFails( """ @@ -278,7 +278,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Boolean', found '2.0'." → Some(Pos(4, 43)))) + "Expected type 'Boolean', found '2.0'." -> Some(Pos(4, 43)))) "String into Boolean" in expectFails( """ @@ -289,7 +289,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Boolean', found '\"true\"'." → Some(Pos(4, 43)))) + "Expected type 'Boolean', found '\"true\"'." -> Some(Pos(4, 43)))) "Unquoted into Boolean" in expectFails( """ @@ -300,7 +300,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'Boolean', found 'TRUE'." → Some(Pos(4, 43)))) + "Expected type 'Boolean', found 'TRUE'." -> Some(Pos(4, 43)))) } "Invalid ID value" should { @@ -313,7 +313,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'ID', found '1.0'." → Some(Pos(4, 33)))) + "Expected type 'ID', found '1.0'." -> Some(Pos(4, 33)))) "Boolean into ID" in expectFails( """ @@ -324,7 +324,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'ID', found 'true'." → Some(Pos(4, 33)))) + "Expected type 'ID', found 'true'." -> Some(Pos(4, 33)))) "Unquoted into ID" in expectFails( """ @@ -335,7 +335,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'ID', found 'SOMETHING'." → Some(Pos(4, 33)))) + "Expected type 'ID', found 'SOMETHING'." -> Some(Pos(4, 33)))) } "Invalid Enum value" should { @@ -348,7 +348,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'DogCommand', found '2'." → Some(Pos(4, 43)))) + "Expected type 'DogCommand', found '2'." 
-> Some(Pos(4, 43)))) "Float into Enum" in expectFails( """ @@ -359,7 +359,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'DogCommand', found '1.0'." → Some(Pos(4, 43)))) + "Expected type 'DogCommand', found '1.0'." -> Some(Pos(4, 43)))) "String into Enum" in expectFails( """ @@ -370,7 +370,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'DogCommand', found '\"SIT\"'." → Some(Pos(4, 43)))) + "Expected type 'DogCommand', found '\"SIT\"'." -> Some(Pos(4, 43)))) "Boolean into Enum" in expectFails( """ @@ -381,7 +381,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'DogCommand', found 'true'." → Some(Pos(4, 43)))) + "Expected type 'DogCommand', found 'true'." -> Some(Pos(4, 43)))) "Unknown Enum Value into Enum" in expectFails( """ @@ -392,7 +392,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } """, List( - "Expected type 'DogCommand!', found 'JUGGLE'. Enum value 'JUGGLE' is undefined in enum type 'DogCommand'. Known values are: SIT, HEEL, DOWN." → Some(Pos(4, 43)))) + "Expected type 'DogCommand!', found 'JUGGLE'. Enum value 'JUGGLE' is undefined in enum type 'DogCommand'. Known values are: SIT, HEEL, DOWN." -> Some(Pos(4, 43)))) "Different case Enum Value into Enum" in expectFails( """ @@ -402,7 +402,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - List("Expected type 'DogCommand!', found 'sit'. Enum value 'sit' is undefined in enum type 'DogCommand'. Known values are: SIT, HEEL, DOWN." → Some(Pos(4, 43)))) + List("Expected type 'DogCommand!', found 'sit'. Enum value 'sit' is undefined in enum type 'DogCommand'. Known values are: SIT, HEEL, DOWN." 
-> Some(Pos(4, 43)))) } "Valid List value" should { @@ -452,7 +452,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'String', found '2'. String value expected" → Seq(Pos(4, 57))) + "Expected type 'String', found '2'. String value expected" -> Seq(Pos(4, 57))) "Single value of incorrect type" in expectFailsSimple( """ @@ -462,7 +462,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type '[String]', found '1'. String value expected" → Seq(Pos(4, 49))) + "Expected type '[String]', found '1'. String value expected" -> Seq(Pos(4, 49))) } "Valid non-nullable value" should { @@ -557,8 +557,8 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'Int!', found '\"two\"'. Int value expected" → Seq(Pos(4, 34)), - "Expected type 'Int!', found '\"one\"'. Int value expected" → Seq(Pos(4, 47))) + "Expected type 'Int!', found '\"two\"'. Int value expected" -> Seq(Pos(4, 34)), + "Expected type 'Int!', found '\"one\"'. Int value expected" -> Seq(Pos(4, 47))) "Incorrect value and missing argument" in expectFailsSimple( """ @@ -568,7 +568,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'Int!', found '\"one\"'. Int value expected" → Seq(Pos(4, 34))) + "Expected type 'Int!', found '\"one\"'. Int value expected" -> Seq(Pos(4, 34))) "Null value" in expectFailsSimple( """ @@ -578,7 +578,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'Int!', found 'null'." → Seq(Pos(4, 34))) + "Expected type 'Int!', found 'null'." -> Seq(Pos(4, 34))) } "Valid input object value" should { @@ -658,7 +658,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Field 'ComplexInput.requiredField' of required type 'Boolean!' was not provided." 
→ Seq(Pos(4, 43))) + "Field 'ComplexInput.requiredField' of required type 'Boolean!' was not provided." -> Seq(Pos(4, 43))) "Partial object, invalid field type" in expectFailsSimple( """ @@ -671,7 +671,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'String', found '2'. String value expected" → Seq(Pos(5, 42))) + "Expected type 'String', found '2'. String value expected" -> Seq(Pos(5, 42))) "Partial object, null to non-null field" in expectFailsSimple( """ @@ -684,7 +684,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'Boolean!', found 'null'." → Seq(Pos(6, 31))) + "Expected type 'Boolean!', found 'null'." -> Seq(Pos(6, 31))) "Partial object, unknown field arg" in expectFailsSimple( """ @@ -697,7 +697,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Field 'unknownField' is not defined by type 'ComplexInput'; Did you mean nonNullField, intField or booleanField?" → Seq(Pos(6, 17))) + "Field 'unknownField' is not defined by type 'ComplexInput'; Did you mean nonNullField, intField or booleanField?" -> Seq(Pos(6, 17))) } "Directive arguments" should { @@ -721,8 +721,8 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { } } """, - "Expected type 'Boolean!', found '\"yes\"'. Boolean value expected" → Seq(Pos(3, 30)), - "Expected type 'Boolean!', found 'ENUM'. Boolean value expected" → Seq(Pos(4, 30))) + "Expected type 'Boolean!', found '\"yes\"'. Boolean value expected" -> Seq(Pos(3, 30)), + "Expected type 'Boolean!', found 'ENUM'. Boolean value expected" -> Seq(Pos(4, 30))) } } @@ -760,9 +760,9 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { dog { name } } """, - "Expected type 'Int!', found 'null'." → Seq(Pos(3, 22)), - "Expected type 'String!', found 'null'." → Seq(Pos(4, 25)), - "Expected type 'Boolean!', found 'null'." 
→ Seq(Pos(5, 47))) + "Expected type 'Int!', found 'null'." -> Seq(Pos(3, 22)), + "Expected type 'String!', found 'null'." -> Seq(Pos(4, 25)), + "Expected type 'Boolean!', found 'null'." -> Seq(Pos(5, 47))) "variables with invalid default values" in expectFailsSimple( """ @@ -774,9 +774,9 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { dog { name } } """, - "Expected type 'Int', found '\"one\"'. Int value expected" → Seq(Pos(3, 21)), - "Expected type 'String', found '4'. String value expected" → Seq(Pos(4, 24)), - "Expected type 'ComplexInput', found '\"notverycomplex\"'." → Seq(Pos(5, 30))) + "Expected type 'Int', found '\"one\"'. Int value expected" -> Seq(Pos(3, 21)), + "Expected type 'String', found '4'. String value expected" -> Seq(Pos(4, 24)), + "Expected type 'ComplexInput', found '\"notverycomplex\"'." -> Seq(Pos(5, 30))) "variables with complex invalid default values" in expectFailsSimple( """ @@ -786,8 +786,8 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { dog { name } } """, - "Expected type 'Boolean!', found '123'. Boolean value expected" → Seq(Pos(3, 47)), - "Expected type 'Int', found '\"abc\"'. Int value expected" → Seq(Pos(3, 62))) + "Expected type 'Boolean!', found '123'. Boolean value expected" -> Seq(Pos(3, 47)), + "Expected type 'Int', found '\"abc\"'. Int value expected" -> Seq(Pos(3, 62))) "complex variables missing required field" in expectFailsSimple( """ @@ -795,7 +795,7 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { dog { name } } """, - "Field 'ComplexInput.requiredField' of required type 'Boolean!' was not provided." → Seq(Pos(2, 55))) + "Field 'ComplexInput.requiredField' of required type 'Boolean!' was not provided." -> Seq(Pos(2, 55))) "list variables with invalid item" in expectFailsSimple( """ @@ -803,6 +803,6 @@ class ValuesOfCorrectTypeSpec extends WordSpec with ValidationSupport { dog { name } } """, - "Expected type 'String', found '2'. 
String value expected" → Seq(Pos(2, 50))) + "Expected type 'String', found '2'. String value expected" -> Seq(Pos(2, 50))) } } diff --git a/src/test/scala/sangria/validation/rules/VariablesAreInputTypesSpec.scala b/src/test/scala/sangria/validation/rules/VariablesAreInputTypesSpec.scala index 8a4b0e55..8fa035b3 100644 --- a/src/test/scala/sangria/validation/rules/VariablesAreInputTypesSpec.scala +++ b/src/test/scala/sangria/validation/rules/VariablesAreInputTypesSpec.scala @@ -22,9 +22,9 @@ class VariablesAreInputTypesSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$a' cannot be non input type 'Dog'." → Some(Pos(2, 23)), - "Variable '$b' cannot be non input type '[[CatOrDog!]]!'." → Some(Pos(2, 32)), - "Variable '$c' cannot be non input type 'Pet'." → Some(Pos(2, 52)) + "Variable '$a' cannot be non input type 'Dog'." -> Some(Pos(2, 23)), + "Variable '$b' cannot be non input type '[[CatOrDog!]]!'." -> Some(Pos(2, 32)), + "Variable '$c' cannot be non input type 'Pet'." 
-> Some(Pos(2, 52)) )) } } diff --git a/src/test/scala/sangria/validation/rules/VariablesInAllowedPositionSpec.scala b/src/test/scala/sangria/validation/rules/VariablesInAllowedPositionSpec.scala index ddcce74a..38fb620d 100644 --- a/src/test/scala/sangria/validation/rules/VariablesInAllowedPositionSpec.scala +++ b/src/test/scala/sangria/validation/rules/VariablesInAllowedPositionSpec.scala @@ -8,7 +8,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { override val defaultRule = Some(new VariablesInAllowedPosition) "Validate: Variables are in allowed positions" should { - "Boolean ⇒ Boolean" in expectPasses( + "Boolean => Boolean" in expectPasses( """ query Query($booleanArg: Boolean) { @@ -18,7 +18,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Boolean ⇒ Boolean within fragment" in expectPasses( + "Boolean => Boolean within fragment" in expectPasses( """ fragment booleanArgFrag on ComplicatedArgs { booleanArgField(booleanArg: $booleanArg) @@ -31,7 +31,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Boolean ⇒ Boolean within fragment (bonus)" in expectPasses( + "Boolean => Boolean within fragment (bonus)" in expectPasses( """ query Query($booleanArg: Boolean) { @@ -44,7 +44,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Boolean! ⇒ Boolean" in expectPasses( + "Boolean! => Boolean" in expectPasses( """ query Query($nonNullBooleanArg: Boolean!) { @@ -54,7 +54,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Boolean! ⇒ Boolean within fragment" in expectPasses( + "Boolean! 
=> Boolean within fragment" in expectPasses( """ fragment booleanArgFrag on ComplicatedArgs { booleanArgField(booleanArg: $nonNullBooleanArg) @@ -68,7 +68,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "[String] ⇒ [String]" in expectPasses( + "[String] => [String]" in expectPasses( """ query Query($stringListVar: [String]) { @@ -78,7 +78,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "[String!] ⇒ [String]" in expectPasses( + "[String!] => [String]" in expectPasses( """ query Query($stringListVar: [String!]) { @@ -88,7 +88,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "String ⇒ [String] in item position" in expectPasses( + "String => [String] in item position" in expectPasses( """ query Query($stringVar: String) { @@ -98,7 +98,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "String! ⇒ [String] in item position" in expectPasses( + "String! => [String] in item position" in expectPasses( """ query Query($stringVar: String!) { @@ -108,7 +108,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "ComplexInput ⇒ ComplexInput" in expectPasses( + "ComplexInput => ComplexInput" in expectPasses( """ query Query($complexVar: ComplexInput) { @@ -118,7 +118,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "ComplexInput ⇒ ComplexInput in field position" in expectPasses( + "ComplexInput => ComplexInput in field position" in expectPasses( """ query Query($boolVar: Boolean = false) { @@ -128,7 +128,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Boolean! ⇒ Boolean! in directive" in expectPasses( + "Boolean! => Boolean! in directive" in expectPasses( """ query Query($boolVar: Boolean!) 
{ @@ -136,7 +136,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """) - "Int ⇒ Int!" in expectFailsPosList( + "Int => Int!" in expectFailsPosList( """ query Query($intArg: Int) { @@ -146,10 +146,10 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." → List(Pos(2, 21), Pos(5, 47)) + "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." -> List(Pos(2, 21), Pos(5, 47)) )) - "Int ⇒ Int! within fragment" in expectFailsPosList( + "Int => Int! within fragment" in expectFailsPosList( """ fragment nonNullIntArgFieldFrag on ComplicatedArgs { nonNullIntArgField(nonNullIntArg: $intArg) @@ -163,10 +163,10 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." → List(Pos(6, 21), Pos(3, 45)) + "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." -> List(Pos(6, 21), Pos(3, 45)) )) - "Int ⇒ Int! within nested fragment" in expectFailsPosList( + "Int => Int! within nested fragment" in expectFailsPosList( """ fragment outerFrag on ComplicatedArgs { ...nonNullIntArgFieldFrag @@ -184,7 +184,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." → List(Pos(10, 21), Pos(7, 45)) + "Variable '$intArg' of type 'Int' used in position expecting type 'Int!'." -> List(Pos(10, 21), Pos(7, 45)) )) "String over Boolean" in expectFailsPosList( @@ -197,10 +197,10 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$stringVar' of type 'String' used in position expecting type 'Boolean'." → List(Pos(2, 21), Pos(5, 41)) + "Variable '$stringVar' of type 'String' used in position expecting type 'Boolean'." 
-> List(Pos(2, 21), Pos(5, 41)) )) - "String ⇒ [String]" in expectFailsPosList( + "String => [String]" in expectFailsPosList( """ query Query($stringVar: String) { @@ -210,10 +210,10 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$stringVar' of type 'String' used in position expecting type '[String]'." → List(Pos(2, 21), Pos(5, 47)) + "Variable '$stringVar' of type 'String' used in position expecting type '[String]'." -> List(Pos(2, 21), Pos(5, 47)) )) - "Boolean ⇒ Boolean! in directive" in expectFailsPosList( + "Boolean => Boolean! in directive" in expectFailsPosList( """ query Query($boolVar: Boolean) { @@ -221,10 +221,10 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$boolVar' of type 'Boolean' used in position expecting type 'Boolean!'." → List(Pos(2, 21), Pos(4, 28)) + "Variable '$boolVar' of type 'Boolean' used in position expecting type 'Boolean!'." -> List(Pos(2, 21), Pos(4, 28)) )) - "String ⇒ Boolean! in directive" in expectFailsPosList( + "String => Boolean! in directive" in expectFailsPosList( """ query Query($stringVar: String) { @@ -232,7 +232,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } """, List( - "Variable '$stringVar' of type 'String' used in position expecting type 'Boolean!'." → List(Pos(2, 21), Pos(4, 28)) + "Variable '$stringVar' of type 'String' used in position expecting type 'Boolean!'." -> List(Pos(2, 21), Pos(4, 28)) )) "Allows optional (nullable) variables with default values" in expectFailsSimple( @@ -243,7 +243,7 @@ class VariablesInAllowedPositionSpec extends WordSpec with ValidationSupport { } } """, - "Variable '$intVar' of type 'Int' used in position expecting type 'Int!'." → Seq(Pos(2, 21), Pos(4, 47))) + "Variable '$intVar' of type 'Int' used in position expecting type 'Int!'." -> Seq(Pos(2, 21), Pos(4, 47))) "Int => Int! 
when variable provides non-null default value" in expectPasses( """ From e0b20ff7752e0bd8072dab5741e23261ead58e43 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:45:17 -0500 Subject: [PATCH 03/26] Update build for 2.13.0 --- build.sbt | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/build.sbt b/build.sbt index ae4d6700..0c271651 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ crossScalaVersions := Seq("2.11.11", "2.12.7", scalaVersion.value) scalacOptions ++= Seq( "-deprecation", "-feature", - "-Xlint:-missing-interpolator,-unused,_") + "-Xlint:-missing-interpolator,_") scalacOptions ++= { if (scalaVersion.value startsWith "2.11") @@ -31,7 +31,7 @@ libraryDependencies ++= Seq( "org.sangria-graphql" %% "macro-visit" % "0.1.2-SNAPSHOT", // Marshalling - "org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.4-SNAPSHOT", + "org.sangria-graphql" %% "sangria-marshalling-api" % "2.0.0-SNAPSHOT", // Streaming "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.1-SNAPSHOT", @@ -41,13 +41,11 @@ libraryDependencies ++= Seq( // Testing "org.scalatest" %% "scalatest" % "3.0.8" % "test", -// "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.1" % Test, -// "org.sangria-graphql" %% "sangria-spray-json" % "1.0.1" % Test, -// "org.sangria-graphql" %% "sangria-argonaut" % "1.0.0" % Test, -// "org.sangria-graphql" %% "sangria-ion" % "1.0.0" % Test, -// "org.sangria-graphql" %% "sangria-monix" % "1.0.0" % Test, -// "org.sangria-graphql" %% "sangria-rxscala" % "1.0.0" % Test, - "eu.timepit" %% "refined" % "0.9.8" % Test, + "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.3-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-spray-json" % "1.0.3-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-argonaut" % "1.0.2-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-ion" % "1.0.1-SNAPSHOT" % Test, + "eu.timepit" %% "refined" % "0.9.9" % Test, // CATs "net.jcazevedo" %% "moultingyaml" % 
"0.4.1" % Test, From 14690b2cb875c255862efe268969d404f884e5fc Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:46:09 -0500 Subject: [PATCH 04/26] Remove Monix and RxScala-based streaming tests for now --- .../scala/sangria/streaming/StreamSpec.scala | 401 ------------------ 1 file changed, 401 deletions(-) delete mode 100644 src/test/scala/sangria/streaming/StreamSpec.scala diff --git a/src/test/scala/sangria/streaming/StreamSpec.scala b/src/test/scala/sangria/streaming/StreamSpec.scala deleted file mode 100644 index bec90ad7..00000000 --- a/src/test/scala/sangria/streaming/StreamSpec.scala +++ /dev/null @@ -1,401 +0,0 @@ -package sangria.streaming - -import language.postfixOps -import org.scalatest.{Matchers, WordSpec} -import sangria.execution.deferred.{DeferredResolver, Fetcher, HasId} -import sangria.execution.{ExceptionHandler, Executor, HandledException} -import sangria.util.FutureResultSupport -import sangria.schema._ -import sangria.macros._ -import sangria.macros.derive._ -import sangria.validation.QueryValidator -import sangria.validation.rules.SingleFieldSubscriptions - -import scala.concurrent.duration._ -import scala.concurrent.Future - -class StreamSpec extends WordSpec with Matchers with FutureResultSupport { - val timeout = 10 seconds - - "Stream based subscriptions" when { - "using RxScala" should { - "Stream results" in { - import sangria.marshalling.sprayJson._ - import spray.json._ - import rx.lang.scala.Observable - - import sangria.streaming.rxscala._ - - import scala.concurrent.ExecutionContext.Implicits.global - - val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ => "world") - )) - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", StringType, resolve = _ => - Observable.from(List("a", "b").map(Action(_)))), - - Field.subs("numbers", OptionType(IntType), resolve = _ => - Observable.from(List(1, 2).map(Action(_)))) - )) - - val 
schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val stream: Observable[JsValue] = - Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - - val result = stream.toBlocking.toList - - result should ( - have(size(4)) and - contain("""{"data": {"letters": "a"}}""".parseJson) and - contain("""{"data": {"letters": "b"}}""".parseJson) and - contain("""{"data": {"numbers": 1}}""".parseJson) and - contain("""{"data": {"numbers": 2}}""".parseJson)) - } - } - - "using monix" should { - import _root_.monix.execution.Scheduler.Implicits.global - import _root_.monix.reactive.Observable - import _root_.monix.reactive.subjects.ReplaySubject - - import sangria.marshalling.sprayJson._ - import spray.json._ - - import sangria.streaming.monix._ - - val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ => "world"))) - - "Stream results with monix" in { - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", StringType, resolve = _ => - Observable("a", "b").map(Action(_))), - - Field.subs("numbers", OptionType(IntType), resolve = _ => - Observable(1, 2).map(Action(_))) - )) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val stream: Observable[JsValue] = - Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - - val result = stream.toListL.runAsync.await(timeout) - - result should ( - have(size(4)) and - contain("""{"data": {"letters": "a"}}""".parseJson) and - contain("""{"data": {"letters": "b"}}""".parseJson) and - contain("""{"data": {"numbers": 1}}""".parseJson) and - contain("""{"data": {"numbers": 2}}""".parseJson)) - } - - "recover stream 
errors" in { - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ => - Observable("a", "b", "c", "d", "e").map { l => - if (l == "c") throw new IllegalStateException("foo") - else l - }.map(Action(_))), - - Field.subs("numbers", OptionType(IntType), resolve = _ => - Observable(1, 2, 3, 4).map(Action(_))) - )) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) => HandledException(e.getMessage) - } - - val stream: Observable[JsValue] = - Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions], - exceptionHandler = exceptionHandler) - - val result = stream.toListL.runAsync.await(timeout) - - result should ( - contain("""{"data": {"letters": null}, "errors": [{"message": "foo", "path":["letters"]}]}""".parseJson) and - contain("""{"data": {"numbers": 1}}""".parseJson) and - contain("""{"data": {"numbers": 2}}""".parseJson) and - contain("""{"data": {"numbers": 3}}""".parseJson) and - contain("""{"data": {"numbers": 4}}""".parseJson)) - } - - "complex stream scenario" in { - case class Fruit(id: Int, name: String, color: String) - - case class FruitEaten(name: String, eater: String) - case class FruitSmashed(id: Int) - - trait Mutation { - this: Ctx => - - @GraphQLField - def eatFruit(name: String, eater: String): String = { - eventBus.onNext(FruitEaten(name, eater)) - - "OmNomNom" - } - - @GraphQLField - def smashFruit(id: Int) = { - eventBus.onNext(FruitSmashed(id)) - - "Splash!" - } - - @GraphQLField - def stop = { - eventBus.onComplete() - - "Full!" 
- } - } - - class Ctx extends Mutation { - val eventBus = ReplaySubject[Any]() - } - - val cherryPicker = Fetcher.caching[Ctx, Fruit, Int]( - (ctx, ids) => Future.successful(ids.map(id => Fruit(id, "cherry", "red"))))(HasId(_.id)) - - val FruitType = ObjectType("Fruit", fields[Unit, Fruit]( - Field("name", StringType, resolve = _.value.name), - Field("color", StringType, resolve = _.value.color) - )) - - val FruitEatenType = ObjectType("FruitEaten", fields[Unit, FruitEaten]( - Field("name", StringType, resolve = _.value.name), - Field("eater", StringType, resolve = _.value.eater) - )) - - val FruitSmashedType = ObjectType("FruitSmashed", fields[Unit, FruitSmashed]( - Field("fruit", FruitType, resolve = c => cherryPicker.defer(c.value.id)) - )) - - val FruitEventType = UnionType("FruitEvent", types = FruitEatenType :: FruitSmashedType :: Nil) - - val QueryType = ObjectType("QueryType", fields[Ctx, Unit]( - Field("hello", StringType, resolve = _ => "world"))) - - val MutationType = deriveContextObjectType[Ctx, Mutation, Unit](identity) - - val SubscriptionType = ObjectType("Subscription", fields[Ctx, Unit]( - Field.subs("fruitEvents", OptionType(FruitEventType), resolve = - c => c.ctx.eventBus.map(Action(_))) - )) - - val schema = Schema(QueryType, Some(MutationType), Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val exceptionHandler = ExceptionHandler { - case (m, e: IllegalStateException) => HandledException(e.getMessage) - } - - val subscription = - graphql""" - subscription { - fruitEvents { - ... on FruitEaten {name, eater} - ... 
on FruitSmashed { - fruit { - name - color - } - } - } - } - """ - - val ctx = new Ctx - - val stream: Observable[JsValue] = - Executor.execute(schema, subscription, - ctx, - deferredResolver = DeferredResolver.fetchers(cherryPicker), - exceptionHandler = exceptionHandler) - - val mutation = - graphql""" - mutation { - e1: eatFruit(name: "banana", eater: "me") - smashFruit(id: 123) - e2: eatFruit(name: "orange", eater: "someone else") - stop - } - """ - - { - import sangria.execution.ExecutionScheme.Default - - Executor.execute(schema, mutation, - ctx, - deferredResolver = DeferredResolver.fetchers(cherryPicker), - exceptionHandler = exceptionHandler).await(timeout) - } - - val result = stream.toListL.runAsync.await(timeout) - - result should ( - have(size(3)) and - contain("""{"data": {"fruitEvents": {"name": "banana", "eater": "me"}}}""".parseJson) and - contain("""{"data": {"fruitEvents": {"name": "orange", "eater": "someone else"}}}""".parseJson) and - contain("""{"data": {"fruitEvents": {"fruit": {"name": "cherry", "color": "red"}}}}""".parseJson)) - } - } - - "in general" should { - import _root_.monix.execution.Scheduler.Implicits.global - - import sangria.marshalling.sprayJson._ - import spray.json._ - - val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ => "world"))) - - "return extended stream result" in { - import _root_.monix.reactive.Observable - import sangria.streaming.monix._ - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ => - Observable("a", "b", "c").map(Action(_))))) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.StreamExtended - - val stream = Executor.execute(schema, graphql"subscription { letters }") - - val result = stream.toListL.runAsync.await(timeout) - - result.map(_.result) should ( - have(size(3)) and - contain("""{"data": 
{"letters": "a"}}""".parseJson) and - contain("""{"data": {"letters": "b"}}""".parseJson) and - contain("""{"data": {"letters": "c"}}""".parseJson)) - } - - "validate that all fields are subscription fields" in { - import _root_.monix.reactive.Observable - import sangria.streaming.monix._ - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), - resolve = _ => Observable("a").map(Action(_))), - Field("hello", StringType, resolve = _ => "world"))) - - val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) - - error.violations.map(_.errorMessage) should ( - have(size(1)) and - contain("Subscription type 'Subscription' may either contain only non-subscription fields or only subscription fields (defined with `Field.subs`). Following fields are non-subscription fields among other subscription fields: 'hello'.")) - } - - "validate that all fields have same stream implementation at schema creation time" in { - val f1 = { - import sangria.streaming.rxscala._ - import rx.lang.scala.Observable - - Field.subs("letters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) => Observable.from(List("a")).map(Action(_))) - } - - val f2 = { - import sangria.streaming.monix._ - import _root_.monix.reactive.Observable - - Field.subs("otherLetters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) => Observable("a").map(Action(_))) - } - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit](f1, f2)) - - val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) - - error.violations.map(_.errorMessage) should ( - have(size(1)) and - contain("Some fields of subscription type 'Subscription' have incompatible stream implementations: 'otherLetters'.")) - } - - "validate that all fields have same stream implementation at stream merge" in { - val SubscriptionType = { - import 
_root_.monix.reactive.Observable - import sangria.streaming.monix._ - - ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ => - Observable("a", "b", "c").map(Action(_))))) - } - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.streaming.rxscala._ - import sangria.execution.ExecutionScheme.StreamExtended - - val stream = Executor.execute(schema, graphql"subscription { letters }") - - an [IllegalStateException] should be thrownBy stream.toBlocking.toList - } - - "return first result for default execution scheme" in { - import _root_.monix.reactive.Observable - import sangria.streaming.monix._ - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ => - Observable("a", "b").map(Action(_))), - - Field.subs("numbers", OptionType(IntType), resolve = _ => - Observable(1, 2).map(Action(_))) - )) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - val result = Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]).await(timeout) - - List(result) should contain oneOf ( - """{"data":{"letters": "a"}}""".parseJson, - """{"data":{"letters": "b"}}""".parseJson, - """{"data":{"numbers": 1}}""".parseJson, - """{"data":{"numbers": 2}}""".parseJson) - } - - "emit one element for non-stream based subscriptions" in { - import sangria.streaming.monix._ - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field("letters", OptionType(StringType), resolve = _ => Some("a")), - Field("numbers", IntType, resolve = _ => 10))) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val stream = Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = 
QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - - val result = stream.toListL.runAsync.await(timeout) - - result should ( - have(size(1)) and - contain("""{"data": {"letters": "a", "numbers": 10}}""".parseJson)) - } - } - } -} From f59ff564fdc39729c20c9440bd68c00b9eeb8e50 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:46:37 -0500 Subject: [PATCH 05/26] Disambiguate for 2.13 --- src/test/scala/sangria/schema/ArgsSpec.scala | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/test/scala/sangria/schema/ArgsSpec.scala b/src/test/scala/sangria/schema/ArgsSpec.scala index e1cbc1a5..af095feb 100644 --- a/src/test/scala/sangria/schema/ArgsSpec.scala +++ b/src/test/scala/sangria/schema/ArgsSpec.scala @@ -224,10 +224,10 @@ class ArgsSpec extends WordSpec with Matchers { "buildArgs with nested map objects" should { "build with nested arguments" in { - val inputMap = NestedParentArgumentName -> Map( + val inputMap = Map(NestedParentArgumentName -> Map( NonDefaultArgumentName -> 1, DefaultArgumentName -> 2, - OptionalArgumentName -> 3) + OptionalArgumentName -> 3)) val args = Args(List(nestedParentArgument), inputMap) val fields = args.arg(nestedParentArgument).asJsObject.fields @@ -238,17 +238,17 @@ class ArgsSpec extends WordSpec with Matchers { } "not build without required arguments" in { - val inputMap = NestedParentArgumentName -> Map( + val inputMap = Map(NestedParentArgumentName -> Map( DefaultArgumentName -> 2, - OptionalArgumentName -> 3) + OptionalArgumentName -> 3)) an [AttributeCoercionError] should be thrownBy Args(List(nestedParentArgument), inputMap) } "build without default arguments" in { - val inputMap = NestedParentArgumentName -> Map( + val inputMap = Map(NestedParentArgumentName -> Map( NonDefaultArgumentName -> 1, - OptionalArgumentName -> 3) + OptionalArgumentName -> 3)) val args = Args(List(nestedParentArgument), inputMap) val fields = 
args.arg(nestedParentArgument).asJsObject.fields @@ -259,9 +259,9 @@ class ArgsSpec extends WordSpec with Matchers { } "build without optional arguments" in { - val inputMap = NestedParentArgumentName -> Map( + val inputMap = Map(NestedParentArgumentName -> Map( NonDefaultArgumentName -> 1, - DefaultArgumentName -> 2) + DefaultArgumentName -> 2)) val args = Args(List(nestedParentArgument), inputMap) val fields = args.arg(nestedParentArgument).asJsObject.fields From 7b4f816e50f2bc449932c908b07b682c80c9f44c Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:49:00 -0500 Subject: [PATCH 06/26] Un-overload implicit conversion (see SI-11662) --- src/main/scala/sangria/schema/Schema.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/scala/sangria/schema/Schema.scala b/src/main/scala/sangria/schema/Schema.scala index 521c0bf2..54439cc3 100644 --- a/src/main/scala/sangria/schema/Schema.scala +++ b/src/main/scala/sangria/schema/Schema.scala @@ -264,7 +264,9 @@ object InterfaceType { case class PossibleInterface[Ctx, Concrete](interfaceType: InterfaceType[Ctx, _]) object PossibleInterface extends PossibleInterfaceLowPrioImplicits { - implicit def apply[Ctx, Abstract, Concrete](interface: InterfaceType[Ctx, Abstract])(implicit ev: PossibleType[Abstract, Concrete]): PossibleInterface[Ctx, Concrete] = + def apply[Ctx, Abstract, Concrete](interface: InterfaceType[Ctx, Abstract])(implicit ev: PossibleType[Abstract, Concrete]): PossibleInterface[Ctx, Concrete] = + PossibleInterface[Ctx, Concrete](interface) + implicit def convert[Ctx, Abstract, Concrete](interface: InterfaceType[Ctx, Abstract])(implicit ev: PossibleType[Abstract, Concrete]): PossibleInterface[Ctx, Concrete] = PossibleInterface[Ctx, Concrete](interface) } From 100b529245c8ea33128d24eeb3690dd9f12714a7 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:37:36 -0500 Subject: [PATCH 07/26] Replace Monix with fs2 in BatchExecutor tests --- build.sbt | 1 
+ .../execution/batch/BatchExecutorSpec.scala | 19 ++++---- .../sangria/util/Fs2SubscriptionStream.scala | 48 +++++++++++++++++++ 3 files changed, 60 insertions(+), 8 deletions(-) create mode 100644 src/test/scala/sangria/util/Fs2SubscriptionStream.scala diff --git a/build.sbt b/build.sbt index 0c271651..86ba3508 100644 --- a/build.sbt +++ b/build.sbt @@ -40,6 +40,7 @@ libraryDependencies ++= Seq( "org.scala-lang" % "scala-reflect" % scalaVersion.value, // Testing + "co.fs2" %% "fs2-core" % "1.1.0-M1" % Test, "org.scalatest" %% "scalatest" % "3.0.8" % "test", "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.3-SNAPSHOT" % Test, "org.sangria-graphql" %% "sangria-spray-json" % "1.0.3-SNAPSHOT" % Test, diff --git a/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala b/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala index 35015f3f..4b8b3d01 100644 --- a/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala +++ b/src/test/scala/sangria/execution/batch/BatchExecutorSpec.scala @@ -1,18 +1,21 @@ package sangria.execution.batch -import scala.language.higherKinds +import scala.concurrent.ExecutionContext +import cats.effect.{ContextShift, IO} import org.scalatest.{Matchers, WordSpec} import sangria.macros._ import sangria.marshalling._ import sangria.schema._ import sangria.util.{FutureResultSupport, Pos} import spray.json._ -import monix.execution.Scheduler.Implicits.global import sangria.marshalling.sprayJson._ -import sangria.streaming.monix._ +import sangria.util.Fs2Support._ import sangria.util.SimpleGraphQlSupport._ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport { + implicit val ec: ExecutionContext = ExecutionContext.global + implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + val IdsArg = Argument("ids", ListInputType(IntType)) val IdArg = Argument("id", IntType) val NameArg = Argument("name", StringType) @@ -108,7 +111,7 @@ class BatchExecutorSpec 
extends WordSpec with Matchers with FutureResultSupport operationNames = List("q1", "q2", "q3"), variables = vars) - res.toListL.runAsync.await.toSet should be ( + res.compile.toVector.unsafeRunSync.toSet should be ( Set( """ { @@ -189,7 +192,7 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport operationNames = List("q3", "q1", "q2"), middleware = BatchExecutor.OperationNameExtension :: Nil) - res.toListL.runAsync.await.toSet should be ( + res.compile.toVector.unsafeRunSync.toSet should be ( Set( """ { @@ -259,7 +262,7 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport val res = BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2")) - res.toListL.runAsync.await.toSet should be ( + res.compile.toVector.unsafeRunSync.toSet should be ( Set( """ { @@ -320,7 +323,7 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport """ checkContainsViolations( - BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2")).toListL.runAsync.await, + BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2")).compile.toVector.unsafeRunSync, "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'Int!'." -> List(Pos(7, 24), Pos(8, 24)), "Inferred variable '$ids' in operation 'q2' is used with two conflicting types: '[Int!]!' and 'String!'." -> List(Pos(7, 24), Pos(10, 25))) } @@ -353,7 +356,7 @@ class BatchExecutorSpec extends WordSpec with Matchers with FutureResultSupport """ checkContainsViolations( - BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2", "q3")).toListL.runAsync.await, + BatchExecutor.executeBatch(schema, query, operationNames = List("q1", "q2", "q3")).compile.toVector.unsafeRunSync, "Operation 'q1' has a circular dependency at path 'q1($from3) -> q3($from2) -> q2($from1) -> q1'." 
-> List(Pos(2, 11)), "Operation 'q3' has a circular dependency at path 'q3($from2) -> q2($from1) -> q1($from3) -> q3'." -> List(Pos(20, 11)), "Operation 'q2' has a circular dependency at path 'q2($from1) -> q1($from3) -> q3($from2) -> q2'." -> List(Pos(14, 11))) diff --git a/src/test/scala/sangria/util/Fs2SubscriptionStream.scala b/src/test/scala/sangria/util/Fs2SubscriptionStream.scala new file mode 100644 index 00000000..043230d3 --- /dev/null +++ b/src/test/scala/sangria/util/Fs2SubscriptionStream.scala @@ -0,0 +1,48 @@ +package sangria.util + +import cats.effect.{ContextShift, IO} +import fs2.Stream +import sangria.streaming.SubscriptionStream +import scala.concurrent.Future +import scala.language.higherKinds + +object Fs2Support { + type IOS[A] = Stream[IO, A] + + class Fs2SubscriptionStream(implicit CS: ContextShift[IO]) extends SubscriptionStream[IOS] { + def supported[T[_]](other: SubscriptionStream[T]) = other.isInstanceOf[Fs2SubscriptionStream] + + def map[A, B](source: IOS[A])(fn: A => B) = source.map(fn) + + def singleFuture[T](value: Future[T]) = + Stream.eval(IO.fromFuture(IO(value))) + + def single[T](value: T) = Stream.emit(value) + + def mapFuture[A, B](source: IOS[A])(fn: A => Future[B]) = + source.evalMap(a => IO.fromFuture(IO(fn(a)))) + + def first[T](s: IOS[T]) = + s.compile.toVector.map(_.head).unsafeToFuture + + def failed[T](e: Throwable) = Stream.raiseError[IO](e) + + def onComplete[Ctx, Res](result: IOS[Res])(op: => Unit) = + result.onFinalize(IO(op)) + + def flatMapFuture[Ctx, Res, T](future: Future[T])(resultFn: T => IOS[Res]) = + Stream.eval(IO.fromFuture(IO(future))).flatMap(resultFn) + + def merge[T](streams: Vector[IOS[T]]) = + if (streams.nonEmpty) + streams.tail.foldLeft(streams.head)(_.merge(_)) + else + throw new IllegalStateException("No streams produced!") + + def recover[T](stream: IOS[T])(fn: Throwable => T) = + stream.handleErrorWith { case e => Stream.emit(fn(e)) } + } + + implicit def observableSubscriptionStream(implicit 
CS: ContextShift[IO]): SubscriptionStream[IOS] = + new Fs2SubscriptionStream +} From 17b69f6d88dce11807f23fcee1c907d98193fe44 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 04:38:55 -0500 Subject: [PATCH 08/26] Replace Monix's AtomicInt with AtomicInteger --- src/test/scala/sangria/execution/QueryReducerSpec.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/scala/sangria/execution/QueryReducerSpec.scala b/src/test/scala/sangria/execution/QueryReducerSpec.scala index 6eee64a2..a1a4e9f6 100644 --- a/src/test/scala/sangria/execution/QueryReducerSpec.scala +++ b/src/test/scala/sangria/execution/QueryReducerSpec.scala @@ -1,6 +1,6 @@ package sangria.execution -import monix.execution.atomic.AtomicInt +import java.util.concurrent.atomic.AtomicInteger import org.scalatest.{Matchers, WordSpec} import sangria.ast import sangria.execution.QueryReducer.ArgumentValuesFn @@ -643,7 +643,7 @@ class QueryReducerSpec extends WordSpec with Matchers with FutureResultSupport { def calcDepth(queryStr: String): Int = { val Success(query) = QueryParser.parse(queryStr) - val depth = AtomicInt(0) + val depth = new AtomicInteger(0) val reducer = QueryReducer.measureDepth[Any]((d, ctx) => { depth.set(d) ctx From cf5dc6b2c548f453be8afd3f919f9d0c01a5ed08 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 07:11:11 -0500 Subject: [PATCH 09/26] Use unordered comparison for ProjectedNames in tests --- .../sangria/execution/ProjectorSpec.scala | 32 ++++++++++++------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/src/test/scala/sangria/execution/ProjectorSpec.scala b/src/test/scala/sangria/execution/ProjectorSpec.scala index 18d5c1b4..b7579ccf 100644 --- a/src/test/scala/sangria/execution/ProjectorSpec.scala +++ b/src/test/scala/sangria/execution/ProjectorSpec.scala @@ -95,6 +95,13 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { } } + def compareUnorderedProjectedNames(x: 
ProjectedName, y: ProjectedName): Boolean = + (x.name == y.name) && + (x.children.size == y.children.size) && + (x.children.sortBy(_.name).zip(y.children.sortBy(_.name)).forall { + case (xc, yc) => compareUnorderedProjectedNames(xc, yc) + }) + "Projector" should { "project all fields except explicitly marked with `NoProjection`" in { val Success(query) = QueryParser.parse( @@ -177,19 +184,22 @@ class ProjectorSpec extends WordSpec with Matchers with FutureResultSupport { "typeId" -> "product", "variants" -> Nil))))) - ctx.allProjections should be ( - Vector( + val expected = Vector( + ProjectedName("id", Vector.empty), + ProjectedName("variants", Vector( ProjectedName("id", Vector.empty), - ProjectedName("variants", Vector( + ProjectedName("attributes", Vector( + ProjectedName("strValue", Vector.empty), + ProjectedName("name", Vector.empty), + ProjectedName("intValue", Vector.empty))), + ProjectedName("rp", Vector( ProjectedName("id", Vector.empty), - ProjectedName("attributes", Vector( - ProjectedName("strValue", Vector.empty), - ProjectedName("name", Vector.empty), - ProjectedName("intValue", Vector.empty))), - ProjectedName("rp", Vector( - ProjectedName("id", Vector.empty), - ProjectedName("variants", Vector( - ProjectedName("id", Vector.empty))))))))) + ProjectedName("variants", Vector( + ProjectedName("id", Vector.empty)))))))) + + ctx.allProjections.zip(expected).map { + case (x, y) => compareUnorderedProjectedNames(x, y) + } ctx.oneLevelprojections should be ( Vector( From 53a5e8d0198f8f238d4cacb5837349bfa0e66aef Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 07:44:55 -0500 Subject: [PATCH 10/26] Fix tests for Parboiled2 updates --- build.sbt | 2 +- src/test/scala/sangria/parser/QueryParserSpec.scala | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 86ba3508..bc7ab062 100644 --- a/build.sbt +++ b/build.sbt @@ -25,7 +25,7 @@ testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, 
"-oF") libraryDependencies ++= Seq( // AST Parser - "org.parboiled" %% "parboiled" % "2.1.7", + "org.parboiled" %% "parboiled" % "2.1.8", // AST Visitor "org.sangria-graphql" %% "macro-visit" % "0.1.2-SNAPSHOT", diff --git a/src/test/scala/sangria/parser/QueryParserSpec.scala b/src/test/scala/sangria/parser/QueryParserSpec.scala index 8fd93d39..73e27b58 100644 --- a/src/test/scala/sangria/parser/QueryParserSpec.scala +++ b/src/test/scala/sangria/parser/QueryParserSpec.scala @@ -1232,7 +1232,7 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { "query Foo($x: Complex = { a: { b: [ $var ] } }) { field }") error.getMessage should equal ( - """Syntax error while parsing GraphQL query. Invalid input '$', expected StringValue, BooleanValue, ObjectValueConst, NullValue, ListValueConst, EnumValue or NumberValue (line 1, column 37): + """Syntax error while parsing GraphQL query. Invalid input '$', expected NumberValue, StringValue, BooleanValue, NullValue, EnumValue, ListValueConst or ObjectValueConst (line 1, column 37): |query Foo($x: Complex = { a: { b: [ $var ] } }) { field } | ^""".stripMargin) (after being strippedOfCarriageReturns) } @@ -1252,7 +1252,7 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { "query Foo($x: Complex = .123) { field }") error.formattedError should equal ( - """Invalid input '.', expected StringValue, BooleanValue, ObjectValueConst, NullValue, ListValueConst, EnumValue or NumberValue (line 1, column 25): + """Invalid input '.', expected NumberValue, StringValue, BooleanValue, NullValue, EnumValue, ListValueConst or ObjectValueConst (line 1, column 25): |query Foo($x: Complex = .123) { field } | ^""".stripMargin) (after being strippedOfCarriageReturns) } @@ -1282,7 +1282,7 @@ class QueryParserSpec extends WordSpec with Matchers with StringMatchers { "query Foo($x: Complex = +1) { field }") error.formattedError should equal ( - """Invalid input '+', expected StringValue, BooleanValue, 
ObjectValueConst, NullValue, ListValueConst, EnumValue or NumberValue (line 1, column 25): + """Invalid input '+', expected NumberValue, StringValue, BooleanValue, NullValue, EnumValue, ListValueConst or ObjectValueConst (line 1, column 25): |query Foo($x: Complex = +1) { field } | ^""".stripMargin) (after being strippedOfCarriageReturns) } From 8e065009d39f6fe59c65af91a5b61c924acf57f7 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Tue, 6 Aug 2019 07:57:34 -0500 Subject: [PATCH 11/26] Fix some tests that depended on map traversal order --- .../execution/deferred/FetcherSpec.scala | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/test/scala/sangria/execution/deferred/FetcherSpec.scala b/src/test/scala/sangria/execution/deferred/FetcherSpec.scala index baa12c38..993c7900 100644 --- a/src/test/scala/sangria/execution/deferred/FetcherSpec.scala +++ b/src/test/scala/sangria/execution/deferred/FetcherSpec.scala @@ -205,14 +205,14 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { val resCached = Executor.execute(schema(fetcherCached), query, new Repo, deferredResolver = DeferredResolver.fetchers(fetcherCached)).await - fetchedIds should be (Vector( - Vector("1", "non-existing", "8"), - Vector("3", "4", "5", "2", "foo!"), + fetchedIds.map(_.sorted) should be (Vector( + Vector("1", "8", "non-existing"), + Vector("2", "3", "4", "5", "foo!"), Vector("5", "6", "7"))) - fetchedIdsCached should be (Vector( - Vector("1", "non-existing", "8"), - Vector("3", "4", "5", "2", "foo!"), + fetchedIdsCached.map(_.sorted) should be (Vector( + Vector("1", "8", "non-existing"), + Vector("2", "3", "4", "5", "foo!"), Vector("6", "7"))) List(res, resCached) foreach (_ should be ( @@ -591,9 +591,9 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { userContext = new Repo) } - fetchedIds should be (Vector( + fetchedIds.map(_.sorted) should be (Vector( Vector("1"), - Vector("3", "4", "2"), + 
Vector("2", "3", "4"), Vector("5", "6", "7"))) } @@ -749,9 +749,9 @@ class FetcherSpec extends WordSpec with Matchers with FutureResultSupport { resolver = DeferredResolver.fetchers(fetcherCat, fetcherProd), userContext = new Repo) - fetchedCatIds should be (Vector( + fetchedCatIds.map(_.sorted) should be (Vector( Vector("1"), - Vector("3", "4", "2"), + Vector("2", "3", "4"), Vector("5", "6", "7"), Vector("4", "5", "6", "7"), Vector("4", "5", "6", "7"))) From 296e2b79384d0abf8e9dda51c55cca5beb31fd3d Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 02:41:43 -0500 Subject: [PATCH 12/26] Update fs2 dependency for tests --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index bc7ab062..7292ecfc 100644 --- a/build.sbt +++ b/build.sbt @@ -40,7 +40,7 @@ libraryDependencies ++= Seq( "org.scala-lang" % "scala-reflect" % scalaVersion.value, // Testing - "co.fs2" %% "fs2-core" % "1.1.0-M1" % Test, + "co.fs2" %% "fs2-core" % "2.0.0" % Test, "org.scalatest" %% "scalatest" % "3.0.8" % "test", "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.3-SNAPSHOT" % Test, "org.sangria-graphql" %% "sangria-spray-json" % "1.0.3-SNAPSHOT" % Test, From 5ffabea04400c83567c9401fce8a2bf22acde58e Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 02:41:47 -0500 Subject: [PATCH 13/26] Revert "Remove Monix and RxScala-based streaming tests for now" This reverts commit 14690b2cb875c255862efe268969d404f884e5fc. 
--- .../scala/sangria/streaming/StreamSpec.scala | 401 ++++++++++++++++++ 1 file changed, 401 insertions(+) create mode 100644 src/test/scala/sangria/streaming/StreamSpec.scala diff --git a/src/test/scala/sangria/streaming/StreamSpec.scala b/src/test/scala/sangria/streaming/StreamSpec.scala new file mode 100644 index 00000000..bec90ad7 --- /dev/null +++ b/src/test/scala/sangria/streaming/StreamSpec.scala @@ -0,0 +1,401 @@ +package sangria.streaming + +import language.postfixOps +import org.scalatest.{Matchers, WordSpec} +import sangria.execution.deferred.{DeferredResolver, Fetcher, HasId} +import sangria.execution.{ExceptionHandler, Executor, HandledException} +import sangria.util.FutureResultSupport +import sangria.schema._ +import sangria.macros._ +import sangria.macros.derive._ +import sangria.validation.QueryValidator +import sangria.validation.rules.SingleFieldSubscriptions + +import scala.concurrent.duration._ +import scala.concurrent.Future + +class StreamSpec extends WordSpec with Matchers with FutureResultSupport { + val timeout = 10 seconds + + "Stream based subscriptions" when { + "using RxScala" should { + "Stream results" in { + import sangria.marshalling.sprayJson._ + import spray.json._ + import rx.lang.scala.Observable + + import sangria.streaming.rxscala._ + + import scala.concurrent.ExecutionContext.Implicits.global + + val QueryType = ObjectType("QueryType", fields[Unit, Unit]( + Field("hello", StringType, resolve = _ => "world") + )) + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", StringType, resolve = _ => + Observable.from(List("a", "b").map(Action(_)))), + + Field.subs("numbers", OptionType(IntType), resolve = _ => + Observable.from(List(1, 2).map(Action(_)))) + )) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.Stream + + val stream: Observable[JsValue] = + Executor.execute(schema, graphql"subscription { letters numbers 
}", + queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) + + val result = stream.toBlocking.toList + + result should ( + have(size(4)) and + contain("""{"data": {"letters": "a"}}""".parseJson) and + contain("""{"data": {"letters": "b"}}""".parseJson) and + contain("""{"data": {"numbers": 1}}""".parseJson) and + contain("""{"data": {"numbers": 2}}""".parseJson)) + } + } + + "using monix" should { + import _root_.monix.execution.Scheduler.Implicits.global + import _root_.monix.reactive.Observable + import _root_.monix.reactive.subjects.ReplaySubject + + import sangria.marshalling.sprayJson._ + import spray.json._ + + import sangria.streaming.monix._ + + val QueryType = ObjectType("QueryType", fields[Unit, Unit]( + Field("hello", StringType, resolve = _ => "world"))) + + "Stream results with monix" in { + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", StringType, resolve = _ => + Observable("a", "b").map(Action(_))), + + Field.subs("numbers", OptionType(IntType), resolve = _ => + Observable(1, 2).map(Action(_))) + )) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.Stream + + val stream: Observable[JsValue] = + Executor.execute(schema, graphql"subscription { letters numbers }", + queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) + + val result = stream.toListL.runAsync.await(timeout) + + result should ( + have(size(4)) and + contain("""{"data": {"letters": "a"}}""".parseJson) and + contain("""{"data": {"letters": "b"}}""".parseJson) and + contain("""{"data": {"numbers": 1}}""".parseJson) and + contain("""{"data": {"numbers": 2}}""".parseJson)) + } + + "recover stream errors" in { + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", OptionType(StringType), resolve = _ => + Observable("a", "b", "c", "d", "e").map { l => + if (l == "c") throw new 
IllegalStateException("foo") + else l + }.map(Action(_))), + + Field.subs("numbers", OptionType(IntType), resolve = _ => + Observable(1, 2, 3, 4).map(Action(_))) + )) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.Stream + + val exceptionHandler = ExceptionHandler { + case (m, e: IllegalStateException) => HandledException(e.getMessage) + } + + val stream: Observable[JsValue] = + Executor.execute(schema, graphql"subscription { letters numbers }", + queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions], + exceptionHandler = exceptionHandler) + + val result = stream.toListL.runAsync.await(timeout) + + result should ( + contain("""{"data": {"letters": null}, "errors": [{"message": "foo", "path":["letters"]}]}""".parseJson) and + contain("""{"data": {"numbers": 1}}""".parseJson) and + contain("""{"data": {"numbers": 2}}""".parseJson) and + contain("""{"data": {"numbers": 3}}""".parseJson) and + contain("""{"data": {"numbers": 4}}""".parseJson)) + } + + "complex stream scenario" in { + case class Fruit(id: Int, name: String, color: String) + + case class FruitEaten(name: String, eater: String) + case class FruitSmashed(id: Int) + + trait Mutation { + this: Ctx => + + @GraphQLField + def eatFruit(name: String, eater: String): String = { + eventBus.onNext(FruitEaten(name, eater)) + + "OmNomNom" + } + + @GraphQLField + def smashFruit(id: Int) = { + eventBus.onNext(FruitSmashed(id)) + + "Splash!" + } + + @GraphQLField + def stop = { + eventBus.onComplete() + + "Full!" 
+ } + } + + class Ctx extends Mutation { + val eventBus = ReplaySubject[Any]() + } + + val cherryPicker = Fetcher.caching[Ctx, Fruit, Int]( + (ctx, ids) => Future.successful(ids.map(id => Fruit(id, "cherry", "red"))))(HasId(_.id)) + + val FruitType = ObjectType("Fruit", fields[Unit, Fruit]( + Field("name", StringType, resolve = _.value.name), + Field("color", StringType, resolve = _.value.color) + )) + + val FruitEatenType = ObjectType("FruitEaten", fields[Unit, FruitEaten]( + Field("name", StringType, resolve = _.value.name), + Field("eater", StringType, resolve = _.value.eater) + )) + + val FruitSmashedType = ObjectType("FruitSmashed", fields[Unit, FruitSmashed]( + Field("fruit", FruitType, resolve = c => cherryPicker.defer(c.value.id)) + )) + + val FruitEventType = UnionType("FruitEvent", types = FruitEatenType :: FruitSmashedType :: Nil) + + val QueryType = ObjectType("QueryType", fields[Ctx, Unit]( + Field("hello", StringType, resolve = _ => "world"))) + + val MutationType = deriveContextObjectType[Ctx, Mutation, Unit](identity) + + val SubscriptionType = ObjectType("Subscription", fields[Ctx, Unit]( + Field.subs("fruitEvents", OptionType(FruitEventType), resolve = + c => c.ctx.eventBus.map(Action(_))) + )) + + val schema = Schema(QueryType, Some(MutationType), Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.Stream + + val exceptionHandler = ExceptionHandler { + case (m, e: IllegalStateException) => HandledException(e.getMessage) + } + + val subscription = + graphql""" + subscription { + fruitEvents { + ... on FruitEaten {name, eater} + ... 
on FruitSmashed { + fruit { + name + color + } + } + } + } + """ + + val ctx = new Ctx + + val stream: Observable[JsValue] = + Executor.execute(schema, subscription, + ctx, + deferredResolver = DeferredResolver.fetchers(cherryPicker), + exceptionHandler = exceptionHandler) + + val mutation = + graphql""" + mutation { + e1: eatFruit(name: "banana", eater: "me") + smashFruit(id: 123) + e2: eatFruit(name: "orange", eater: "someone else") + stop + } + """ + + { + import sangria.execution.ExecutionScheme.Default + + Executor.execute(schema, mutation, + ctx, + deferredResolver = DeferredResolver.fetchers(cherryPicker), + exceptionHandler = exceptionHandler).await(timeout) + } + + val result = stream.toListL.runAsync.await(timeout) + + result should ( + have(size(3)) and + contain("""{"data": {"fruitEvents": {"name": "banana", "eater": "me"}}}""".parseJson) and + contain("""{"data": {"fruitEvents": {"name": "orange", "eater": "someone else"}}}""".parseJson) and + contain("""{"data": {"fruitEvents": {"fruit": {"name": "cherry", "color": "red"}}}}""".parseJson)) + } + } + + "in general" should { + import _root_.monix.execution.Scheduler.Implicits.global + + import sangria.marshalling.sprayJson._ + import spray.json._ + + val QueryType = ObjectType("QueryType", fields[Unit, Unit]( + Field("hello", StringType, resolve = _ => "world"))) + + "return extended stream result" in { + import _root_.monix.reactive.Observable + import sangria.streaming.monix._ + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", OptionType(StringType), resolve = _ => + Observable("a", "b", "c").map(Action(_))))) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.StreamExtended + + val stream = Executor.execute(schema, graphql"subscription { letters }") + + val result = stream.toListL.runAsync.await(timeout) + + result.map(_.result) should ( + have(size(3)) and + contain("""{"data": 
{"letters": "a"}}""".parseJson) and + contain("""{"data": {"letters": "b"}}""".parseJson) and + contain("""{"data": {"letters": "c"}}""".parseJson)) + } + + "validate that all fields are subscription fields" in { + import _root_.monix.reactive.Observable + import sangria.streaming.monix._ + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", OptionType(StringType), + resolve = _ => Observable("a").map(Action(_))), + Field("hello", StringType, resolve = _ => "world"))) + + val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) + + error.violations.map(_.errorMessage) should ( + have(size(1)) and + contain("Subscription type 'Subscription' may either contain only non-subscription fields or only subscription fields (defined with `Field.subs`). Following fields are non-subscription fields among other subscription fields: 'hello'.")) + } + + "validate that all fields have same stream implementation at schema creation time" in { + val f1 = { + import sangria.streaming.rxscala._ + import rx.lang.scala.Observable + + Field.subs("letters", OptionType(StringType), + resolve = (_: Context[Unit, Unit]) => Observable.from(List("a")).map(Action(_))) + } + + val f2 = { + import sangria.streaming.monix._ + import _root_.monix.reactive.Observable + + Field.subs("otherLetters", OptionType(StringType), + resolve = (_: Context[Unit, Unit]) => Observable("a").map(Action(_))) + } + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit](f1, f2)) + + val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) + + error.violations.map(_.errorMessage) should ( + have(size(1)) and + contain("Some fields of subscription type 'Subscription' have incompatible stream implementations: 'otherLetters'.")) + } + + "validate that all fields have same stream implementation at stream merge" in { + val SubscriptionType = { + import 
_root_.monix.reactive.Observable + import sangria.streaming.monix._ + + ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", OptionType(StringType), resolve = _ => + Observable("a", "b", "c").map(Action(_))))) + } + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.streaming.rxscala._ + import sangria.execution.ExecutionScheme.StreamExtended + + val stream = Executor.execute(schema, graphql"subscription { letters }") + + an [IllegalStateException] should be thrownBy stream.toBlocking.toList + } + + "return first result for default execution scheme" in { + import _root_.monix.reactive.Observable + import sangria.streaming.monix._ + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field.subs("letters", OptionType(StringType), resolve = _ => + Observable("a", "b").map(Action(_))), + + Field.subs("numbers", OptionType(IntType), resolve = _ => + Observable(1, 2).map(Action(_))) + )) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + val result = Executor.execute(schema, graphql"subscription { letters numbers }", + queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]).await(timeout) + + List(result) should contain oneOf ( + """{"data":{"letters": "a"}}""".parseJson, + """{"data":{"letters": "b"}}""".parseJson, + """{"data":{"numbers": 1}}""".parseJson, + """{"data":{"numbers": 2}}""".parseJson) + } + + "emit one element for non-stream based subscriptions" in { + import sangria.streaming.monix._ + + val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( + Field("letters", OptionType(StringType), resolve = _ => Some("a")), + Field("numbers", IntType, resolve = _ => 10))) + + val schema = Schema(QueryType, subscription = Some(SubscriptionType)) + + import sangria.execution.ExecutionScheme.Stream + + val stream = Executor.execute(schema, graphql"subscription { letters numbers }", + queryValidator = 
QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) + + val result = stream.toListL.runAsync.await(timeout) + + result should ( + have(size(1)) and + contain("""{"data": {"letters": "a", "numbers": 10}}""".parseJson)) + } + } + } +} From 06ba4ed808597df56375b802278c8d0f49ab6ac6 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 03:09:40 -0500 Subject: [PATCH 14/26] Reinstate Monix streaming tests --- build.sbt | 1 + .../scala/sangria/streaming/StreamSpec.scala | 97 +------------------ 2 files changed, 6 insertions(+), 92 deletions(-) diff --git a/build.sbt b/build.sbt index 7292ecfc..75473e5a 100644 --- a/build.sbt +++ b/build.sbt @@ -46,6 +46,7 @@ libraryDependencies ++= Seq( "org.sangria-graphql" %% "sangria-spray-json" % "1.0.3-SNAPSHOT" % Test, "org.sangria-graphql" %% "sangria-argonaut" % "1.0.2-SNAPSHOT" % Test, "org.sangria-graphql" %% "sangria-ion" % "1.0.1-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-monix" % "2.0.0-SNAPSHOT" % Test, "eu.timepit" %% "refined" % "0.9.9" % Test, // CATs diff --git a/src/test/scala/sangria/streaming/StreamSpec.scala b/src/test/scala/sangria/streaming/StreamSpec.scala index bec90ad7..d1b8a348 100644 --- a/src/test/scala/sangria/streaming/StreamSpec.scala +++ b/src/test/scala/sangria/streaming/StreamSpec.scala @@ -18,47 +18,6 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { val timeout = 10 seconds "Stream based subscriptions" when { - "using RxScala" should { - "Stream results" in { - import sangria.marshalling.sprayJson._ - import spray.json._ - import rx.lang.scala.Observable - - import sangria.streaming.rxscala._ - - import scala.concurrent.ExecutionContext.Implicits.global - - val QueryType = ObjectType("QueryType", fields[Unit, Unit]( - Field("hello", StringType, resolve = _ => "world") - )) - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", StringType, resolve = _ => - Observable.from(List("a", 
"b").map(Action(_)))), - - Field.subs("numbers", OptionType(IntType), resolve = _ => - Observable.from(List(1, 2).map(Action(_)))) - )) - - val schema = Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.execution.ExecutionScheme.Stream - - val stream: Observable[JsValue] = - Executor.execute(schema, graphql"subscription { letters numbers }", - queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - - val result = stream.toBlocking.toList - - result should ( - have(size(4)) and - contain("""{"data": {"letters": "a"}}""".parseJson) and - contain("""{"data": {"letters": "b"}}""".parseJson) and - contain("""{"data": {"numbers": 1}}""".parseJson) and - contain("""{"data": {"numbers": 2}}""".parseJson)) - } - } - "using monix" should { import _root_.monix.execution.Scheduler.Implicits.global import _root_.monix.reactive.Observable @@ -89,7 +48,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { Executor.execute(schema, graphql"subscription { letters numbers }", queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - val result = stream.toListL.runAsync.await(timeout) + val result = stream.toListL.runToFuture.await(timeout) result should ( have(size(4)) and @@ -124,7 +83,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions], exceptionHandler = exceptionHandler) - val result = stream.toListL.runAsync.await(timeout) + val result = stream.toListL.runToFuture.await(timeout) result should ( contain("""{"data": {"letters": null}, "errors": [{"message": "foo", "path":["letters"]}]}""".parseJson) and @@ -248,7 +207,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { exceptionHandler = exceptionHandler).await(timeout) } - val result = stream.toListL.runAsync.await(timeout) + val result = stream.toListL.runToFuture.await(timeout) 
result should ( have(size(3)) and @@ -281,7 +240,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { val stream = Executor.execute(schema, graphql"subscription { letters }") - val result = stream.toListL.runAsync.await(timeout) + val result = stream.toListL.runToFuture.await(timeout) result.map(_.result) should ( have(size(3)) and @@ -306,52 +265,6 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { contain("Subscription type 'Subscription' may either contain only non-subscription fields or only subscription fields (defined with `Field.subs`). Following fields are non-subscription fields among other subscription fields: 'hello'.")) } - "validate that all fields have same stream implementation at schema creation time" in { - val f1 = { - import sangria.streaming.rxscala._ - import rx.lang.scala.Observable - - Field.subs("letters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) => Observable.from(List("a")).map(Action(_))) - } - - val f2 = { - import sangria.streaming.monix._ - import _root_.monix.reactive.Observable - - Field.subs("otherLetters", OptionType(StringType), - resolve = (_: Context[Unit, Unit]) => Observable("a").map(Action(_))) - } - - val SubscriptionType = ObjectType("Subscription", fields[Unit, Unit](f1, f2)) - - val error = intercept [SchemaValidationException] (Schema(QueryType, subscription = Some(SubscriptionType))) - - error.violations.map(_.errorMessage) should ( - have(size(1)) and - contain("Some fields of subscription type 'Subscription' have incompatible stream implementations: 'otherLetters'.")) - } - - "validate that all fields have same stream implementation at stream merge" in { - val SubscriptionType = { - import _root_.monix.reactive.Observable - import sangria.streaming.monix._ - - ObjectType("Subscription", fields[Unit, Unit]( - Field.subs("letters", OptionType(StringType), resolve = _ => - Observable("a", "b", "c").map(Action(_))))) - } - - val schema = 
Schema(QueryType, subscription = Some(SubscriptionType)) - - import sangria.streaming.rxscala._ - import sangria.execution.ExecutionScheme.StreamExtended - - val stream = Executor.execute(schema, graphql"subscription { letters }") - - an [IllegalStateException] should be thrownBy stream.toBlocking.toList - } - "return first result for default execution scheme" in { import _root_.monix.reactive.Observable import sangria.streaming.monix._ @@ -390,7 +303,7 @@ class StreamSpec extends WordSpec with Matchers with FutureResultSupport { val stream = Executor.execute(schema, graphql"subscription { letters numbers }", queryValidator = QueryValidator.default.withoutValidation[SingleFieldSubscriptions]) - val result = stream.toListL.runAsync.await(timeout) + val result = stream.toListL.runToFuture.await(timeout) result should ( have(size(1)) and From 5232dfb0aead0eefce11f29437458be6f0d33b8f Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 05:43:32 -0500 Subject: [PATCH 15/26] Update snapshot versions --- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 75473e5a..01e899e1 100644 --- a/build.sbt +++ b/build.sbt @@ -31,7 +31,7 @@ libraryDependencies ++= Seq( "org.sangria-graphql" %% "macro-visit" % "0.1.2-SNAPSHOT", // Marshalling - "org.sangria-graphql" %% "sangria-marshalling-api" % "2.0.0-SNAPSHOT", + "org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.4-SNAPSHOT", // Streaming "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.1-SNAPSHOT", @@ -42,10 +42,10 @@ libraryDependencies ++= Seq( // Testing "co.fs2" %% "fs2-core" % "2.0.0" % Test, "org.scalatest" %% "scalatest" % "3.0.8" % "test", - "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.3-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-spray-json" % "1.0.3-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-argonaut" % "1.0.2-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-ion" % "1.0.1-SNAPSHOT" % 
Test, + "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.2-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-spray-json" % "1.0.2-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-argonaut" % "1.0.1-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-ion" % "2.0.0-SNAPSHOT" % Test, "org.sangria-graphql" %% "sangria-monix" % "2.0.0-SNAPSHOT" % Test, "eu.timepit" %% "refined" % "0.9.9" % Test, From e269710abed843a67160bcc3bf292b75b14694bc Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 05:47:43 -0500 Subject: [PATCH 16/26] Add MiMa --- build.sbt | 1 + project/build.properties | 2 +- project/coverage.sbt | 2 -- project/plugins.sbt | 3 +++ 4 files changed, 5 insertions(+), 3 deletions(-) delete mode 100644 project/coverage.sbt create mode 100644 project/plugins.sbt diff --git a/build.sbt b/build.sbt index 01e899e1..87c416b8 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,7 @@ name := "sangria" organization := "org.sangria-graphql" version := "1.4.3-SNAPSHOT" +mimaPreviousArtifacts := Set("org.sangria-graphql" %% "sangria" % "1.4.2") description := "Scala GraphQL implementation" homepage := Some(url("http://sangria-graphql.org")) diff --git a/project/build.properties b/project/build.properties index c0bab049..080a737e 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.8 +sbt.version=1.3.0 diff --git a/project/coverage.sbt b/project/coverage.sbt deleted file mode 100644 index 68f5298f..00000000 --- a/project/coverage.sbt +++ /dev/null @@ -1,2 +0,0 @@ -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1") -addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.2") diff --git a/project/plugins.sbt b/project/plugins.sbt new file mode 100644 index 00000000..97aff07b --- /dev/null +++ b/project/plugins.sbt @@ -0,0 +1,3 @@ +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") +addSbtPlugin("org.scoverage" % 
"sbt-coveralls" % "1.2.7") From 1cfc6415978248596bf25b8f28aa54f8b96588ff Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 05:48:04 -0500 Subject: [PATCH 17/26] Update version to reflect bincompat status --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 87c416b8..ef75064f 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,6 @@ name := "sangria" organization := "org.sangria-graphql" -version := "1.4.3-SNAPSHOT" +version := "2.0.0-SNAPSHOT" mimaPreviousArtifacts := Set("org.sangria-graphql" %% "sangria" % "1.4.2") description := "Scala GraphQL implementation" From c25d6bd15e8de415d76d263443fcedad693980b4 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Mon, 16 Sep 2019 05:48:53 -0500 Subject: [PATCH 18/26] Update Scala versions --- .travis.yml | 9 +++++---- build.sbt | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 188ea1d8..efcc9a36 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,15 @@ language: scala scala: - - 2.12.7 - - 2.11.11 + - 2.11.12 + - 2.12.10 + - 2.13.0 jdk: - - oraclejdk8 + - openjdk8 - openjdk11 matrix: exclude: - - scala: 2.11.11 + - scala: 2.11.12 jdk: openjdk11 script: | diff --git a/build.sbt b/build.sbt index ef75064f..2b9d0411 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ homepage := Some(url("http://sangria-graphql.org")) licenses := Seq("Apache License, ASL Version 2.0" → url("http://www.apache.org/licenses/LICENSE-2.0")) scalaVersion := "2.13.0" -crossScalaVersions := Seq("2.11.11", "2.12.7", scalaVersion.value) +crossScalaVersions := Seq("2.11.12", "2.12.10", scalaVersion.value) scalacOptions ++= Seq( "-deprecation", From 1f4980edd64f3626cc6af30c89a6f136d0ca74be Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:30:57 -0500 Subject: [PATCH 19/26] Update to Sangria module releases --- build.sbt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git 
a/build.sbt b/build.sbt index 2b9d0411..fd424727 100644 --- a/build.sbt +++ b/build.sbt @@ -29,13 +29,13 @@ libraryDependencies ++= Seq( "org.parboiled" %% "parboiled" % "2.1.8", // AST Visitor - "org.sangria-graphql" %% "macro-visit" % "0.1.2-SNAPSHOT", + "org.sangria-graphql" %% "macro-visit" % "0.1.2", // Marshalling - "org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.4-SNAPSHOT", + "org.sangria-graphql" %% "sangria-marshalling-api" % "1.0.4", // Streaming - "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.1-SNAPSHOT", + "org.sangria-graphql" %% "sangria-streaming-api" % "1.0.1", // Macros "org.scala-lang" % "scala-reflect" % scalaVersion.value, @@ -43,11 +43,11 @@ libraryDependencies ++= Seq( // Testing "co.fs2" %% "fs2-core" % "2.0.0" % Test, "org.scalatest" %% "scalatest" % "3.0.8" % "test", - "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.2-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-spray-json" % "1.0.2-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-argonaut" % "1.0.1-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-ion" % "2.0.0-SNAPSHOT" % Test, - "org.sangria-graphql" %% "sangria-monix" % "2.0.0-SNAPSHOT" % Test, + "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.2" % Test, + "org.sangria-graphql" %% "sangria-spray-json" % "1.0.2" % Test, + "org.sangria-graphql" %% "sangria-argonaut" % "1.0.1" % Test, + "org.sangria-graphql" %% "sangria-ion" % "2.0.0" % Test, + "org.sangria-graphql" %% "sangria-monix" % "2.0.0" % Test, "eu.timepit" %% "refined" % "0.9.9" % Test, // CATs From 5d5828bf675010496771d66dfe8194b6d69c49a9 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:31:58 -0500 Subject: [PATCH 20/26] Update sbt and plugins --- project/build.properties | 2 +- project/plugins.sbt | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/project/build.properties b/project/build.properties index 080a737e..6adcdc75 100644 --- a/project/build.properties +++ 
b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.0 +sbt.version=1.3.3 diff --git a/project/plugins.sbt b/project/plugins.sbt index 97aff07b..23af2b2d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,3 +1,8 @@ -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.0") +resolvers += Resolver.url( + "typesafe sbt-plugins", + url("https://dl.bintray.com/typesafe/sbt-plugins") +)(Resolver.ivyStylePatterns) + +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.7") From 8ff6ea1a9e53bdc6a9a69d2ba41be5247fa80c94 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:32:52 -0500 Subject: [PATCH 21/26] Change badge links to point to temporary GitHub org --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e1219822..3d564530 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [Sangria](http://sangria-graphql.org/) is a scala [GraphQL](http://facebook.github.io/graphql/) library. 
-[![Build Status](https://travis-ci.org/sangria-graphql/sangria.svg?branch=master)](https://travis-ci.org/sangria-graphql/sangria) [![Coverage Status](http://coveralls.io/repos/sangria-graphql/sangria/badge.svg?branch=master&service=github)](http://coveralls.io/github/sangria-graphql/sangria?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.sangria-graphql/sangria_2.11/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.sangria-graphql/sangria_2.11) [![License](http://img.shields.io/:license-Apache%202-brightgreen.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) [![Scaladocs](https://www.javadoc.io/badge/org.sangria-graphql/sangria_2.12.svg?label=docs)](https://www.javadoc.io/doc/org.sangria-graphql/sangria_2.12) [![Join the chat at https://gitter.im/sangria-graphql/sangria](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/sangria-graphql/sangria?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Build Status](https://travis-ci.org/sangria-graphql-org/sangria.svg?branch=master)](https://travis-ci.org/sangria-graphql-org/sangria) [![Coverage Status](http://coveralls.io/repos/sangria-graphql-org/sangria/badge.svg?branch=master&service=github)](http://coveralls.io/github/sangria-graphql-org/sangria?branch=master) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.sangria-graphql/sangria_2.11/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.sangria-graphql/sangria_2.11) [![License](http://img.shields.io/:license-Apache%202-brightgreen.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) [![Scaladocs](https://www.javadoc.io/badge/org.sangria-graphql/sangria_2.12.svg?label=docs)](https://www.javadoc.io/doc/org.sangria-graphql/sangria_2.12) [![Join the chat at 
https://gitter.im/sangria-graphql/sangria](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/sangria-graphql/sangria?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) SBT Configuration: From d7af403204a145af32a2d460ae8ca8cefba6b5e4 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:33:27 -0500 Subject: [PATCH 22/26] Change SCM links in POM --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index fd424727..6099c59f 100644 --- a/build.sbt +++ b/build.sbt @@ -70,8 +70,8 @@ startYear := Some(2015) organizationHomepage := Some(url("https://github.com/sangria-graphql")) developers := Developer("OlegIlyenko", "Oleg Ilyenko", "", url("https://github.com/OlegIlyenko")) :: Nil scmInfo := Some(ScmInfo( - browseUrl = url("https://github.com/sangria-graphql/sangria.git"), - connection = "scm:git:git@github.com:sangria-graphql/sangria.git" + browseUrl = url("https://github.com/sangria-graphql-org/sangria.git"), + connection = "scm:git:git@github.com:sangria-graphql-org/sangria.git" )) // nice *magenta* prompt! 
From c0ca0fa7d8f7c1d0ee600227e5b7d96f685d4b33 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:36:21 -0500 Subject: [PATCH 23/26] Add sbt-release --- build.sbt | 4 ++-- project/plugins.sbt | 2 ++ version.sbt | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 version.sbt diff --git a/build.sbt b/build.sbt index 6099c59f..1419760f 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,5 @@ name := "sangria" organization := "org.sangria-graphql" -version := "2.0.0-SNAPSHOT" mimaPreviousArtifacts := Set("org.sangria-graphql" %% "sangria" % "1.4.2") description := "Scala GraphQL implementation" @@ -56,7 +55,8 @@ libraryDependencies ++= Seq( ) // Publishing - +releaseCrossBuild := true +releasePublishArtifactsAction := PgpKeys.publishSigned.value publishMavenStyle := true publishArtifact in Test := false pomIncludeRepository := (_ ⇒ false) diff --git a/project/plugins.sbt b/project/plugins.sbt index 23af2b2d..98cc5198 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,6 +3,8 @@ resolvers += Resolver.url( url("https://dl.bintray.com/typesafe/sbt-plugins") )(Resolver.ivyStylePatterns) +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.7") diff --git a/version.sbt b/version.sbt new file mode 100644 index 00000000..c92517f1 --- /dev/null +++ b/version.sbt @@ -0,0 +1 @@ +version in ThisBuild := "2.0.0-SNAPSHOT" From 165dcfbee71e4430d4bd414f559fc90058518c29 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:47:29 -0500 Subject: [PATCH 24/26] Update fs2 test dependency --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 1419760f..2dd366f9 100644 --- a/build.sbt +++ b/build.sbt @@ -40,7 +40,7 @@ libraryDependencies ++= Seq( 
"org.scala-lang" % "scala-reflect" % scalaVersion.value, // Testing - "co.fs2" %% "fs2-core" % "2.0.0" % Test, + "co.fs2" %% "fs2-core" % "2.0.1" % Test, "org.scalatest" %% "scalatest" % "3.0.8" % "test", "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.2" % Test, "org.sangria-graphql" %% "sangria-spray-json" % "1.0.2" % Test, From ede919343a868a105a0c0291b951d9b9e5ac0222 Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:47:51 -0500 Subject: [PATCH 25/26] Update Refined test dependency --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 2dd366f9..015b0005 100644 --- a/build.sbt +++ b/build.sbt @@ -47,7 +47,7 @@ libraryDependencies ++= Seq( "org.sangria-graphql" %% "sangria-argonaut" % "1.0.1" % Test, "org.sangria-graphql" %% "sangria-ion" % "2.0.0" % Test, "org.sangria-graphql" %% "sangria-monix" % "2.0.0" % Test, - "eu.timepit" %% "refined" % "0.9.9" % Test, + "eu.timepit" %% "refined" % "0.9.10" % Test, // CATs "net.jcazevedo" %% "moultingyaml" % "0.4.1" % Test, From 123c986a7c91a5a83aefbc14b854cf89f5145bbd Mon Sep 17 00:00:00 2001 From: Travis Brown Date: Fri, 25 Oct 2019 08:50:13 -0500 Subject: [PATCH 26/26] Use new test scope format for consistency --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 015b0005..a1f5596c 100644 --- a/build.sbt +++ b/build.sbt @@ -41,7 +41,7 @@ libraryDependencies ++= Seq( // Testing "co.fs2" %% "fs2-core" % "2.0.1" % Test, - "org.scalatest" %% "scalatest" % "3.0.8" % "test", + "org.scalatest" %% "scalatest" % "3.0.8" % Test, "org.sangria-graphql" %% "sangria-marshalling-testkit" % "1.0.2" % Test, "org.sangria-graphql" %% "sangria-spray-json" % "1.0.2" % Test, "org.sangria-graphql" %% "sangria-argonaut" % "1.0.1" % Test,