diff --git a/build.sbt b/build.sbt index 49061dc..e5fa7a1 100644 --- a/build.sbt +++ b/build.sbt @@ -23,7 +23,6 @@ headers := Map( libraryDependencies ++= { val catsCoreV = "2.10.0" - val catsEffectV = "3.5.1" val existV = "4.4.0" val algoliaV = "2.19.0" val akkaV = "2.5.16" @@ -33,7 +32,6 @@ libraryDependencies ++= { "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.1", "org.scala-lang.modules" %% "scala-java8-compat" % "0.9.0", "org.typelevel" %% "cats-core" % catsCoreV, - "org.typelevel" %% "cats-effect" % catsEffectV, "org.clapper" %% "grizzled-slf4j" % "1.3.2" exclude("org.slf4j", "slf4j-api"), diff --git a/src/main/scala/org/humanistika/exist/index/algolia/AlgoliaIndexWorker.scala b/src/main/scala/org/humanistika/exist/index/algolia/AlgoliaIndexWorker.scala index 1c0e17a..63e640e 100644 --- a/src/main/scala/org/humanistika/exist/index/algolia/AlgoliaIndexWorker.scala +++ b/src/main/scala/org/humanistika/exist/index/algolia/AlgoliaIndexWorker.scala @@ -29,9 +29,8 @@ import org.exist.xquery.XQueryContext import org.exist_db.collection_config._1.Algolia import org.w3c.dom.{Element, Node, NodeList} import AlgoliaIndexWorker._ -import akka.actor.{ActorPath, ActorRef, ActorSystem} +import akka.actor.{ActorRef, ActorSystem} import org.apache.logging.log4j.{LogManager, Logger} -import org.humanistika.exist.index.algolia.backend.IncrementalIndexingManagerActor import org.humanistika.exist.index.algolia.backend.IncrementalIndexingManagerActor.RemoveForCollection import scala.collection.JavaConverters._ diff --git a/src/main/scala/org/humanistika/exist/index/algolia/Checksum.scala b/src/main/scala/org/humanistika/exist/index/algolia/Checksum.scala index de680e0..c816d1e 100644 --- a/src/main/scala/org/humanistika/exist/index/algolia/Checksum.scala +++ b/src/main/scala/org/humanistika/exist/index/algolia/Checksum.scala @@ -21,9 +21,6 @@ import java.io.InputStream import java.nio.file.{Files, Path} import java.security.MessageDigest -import cats.effect.{IO, 
Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp - import scala.annotation.tailrec /** @@ -66,18 +63,12 @@ object Checksum { digest.digest() } - val checksumIO = Resource.make(IO { Files.newInputStream(file) })(is => IO { is.close() }).use { is => - getHash(algorithm).flatMap { digest => - // 16 KB buffer - IO.pure(Array.ofDim[Byte](bufferSize)).flatMap { buf => - IO { digestStream(is, buf, digest) } - } - } - } - - checksumIO - .attempt - .unsafeRunSync() + With(Files.newInputStream(file)) { is => + val digest = getHash(algorithm) + // 16 KB buffer + val buf = Array.ofDim[Byte](bufferSize) + digestStream(is, buf, digest) + }.toEither } /** @@ -87,24 +78,22 @@ object Checksum { * * @return the message digest */ - private def getHash(algorithm: Algorithm) : IO[MessageDigest] = { - IO { - algorithm match { - case MD2 => - MessageDigest.getInstance("MD2") - case MD5 => - MessageDigest.getInstance("MD5") - case SHA1 => - MessageDigest.getInstance("SHA-1") - case SHA256 => - MessageDigest.getInstance("SHA-256") - case SHA384 => - MessageDigest.getInstance("SHA-384") - case SHA512 => - MessageDigest.getInstance("SHA-512") - case _ => - throw new UnsupportedOperationException(s"Support for $algorithm not yet implemented.") - } + private def getHash(algorithm: Algorithm) : MessageDigest = { + algorithm match { + case MD2 => + MessageDigest.getInstance("MD2") + case MD5 => + MessageDigest.getInstance("MD5") + case SHA1 => + MessageDigest.getInstance("SHA-1") + case SHA256 => + MessageDigest.getInstance("SHA-256") + case SHA384 => + MessageDigest.getInstance("SHA-384") + case SHA512 => + MessageDigest.getInstance("SHA-512") + case _ => + throw new UnsupportedOperationException(s"Support for $algorithm not yet implemented.") } } } diff --git a/src/main/scala/org/humanistika/exist/index/algolia/Serializer.scala b/src/main/scala/org/humanistika/exist/index/algolia/Serializer.scala index 4fb4044..59733c3 100644 --- 
a/src/main/scala/org/humanistika/exist/index/algolia/Serializer.scala +++ b/src/main/scala/org/humanistika/exist/index/algolia/Serializer.scala @@ -29,8 +29,6 @@ import org.exist.util.serializer.{SAXSerializer, SerializerPool} import org.humanistika.exist.index.algolia.JsonUtil.writeValueField import org.humanistika.exist.index.algolia.LiteralTypeConfig.LiteralTypeConfig import org.w3c.dom.{Attr, Element, NamedNodeMap, Node, NodeList, Text} -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp import cats.syntax.either._ object Serializer { @@ -123,44 +121,38 @@ object Serializer { } } - val jsonIO: IO[String] = Resource.fromAutoCloseable(IO { new StringWriter() }).use { writer => - Resource.fromAutoCloseable(IO { new JsonFactory().createGenerator(writer) }).use { gen => - IO { - val childNodes = element.getChildNodes - if (hasOnlyTextChildren(childNodes)) { + With(new StringWriter()) { writer => + With(new JsonFactory().createGenerator(writer)) { gen => + val childNodes = element.getChildNodes + if (hasOnlyTextChildren(childNodes)) { - // needed so Jackson's JSON Generator won't complain - gen.writeStartObject() + // needed so Jackson's JSON Generator won't complain + gen.writeStartObject() - gen.writeRaw(',') + gen.writeRaw(',') - serializeAttributes(gen)(element.getAttributes) - if (childNodes.getLength > 0) { - serializeTextNodes(gen)(childNodes) - } + serializeAttributes(gen)(element.getAttributes) + if (childNodes.getLength > 0) { + serializeTextNodes(gen)(childNodes) + } - // needed so Jackson's JSON Generator won't complain - gen.writeEndObject() - } else { - serialize(element).map { elementGenerator => + // needed so Jackson's JSON Generator won't complain + gen.writeEndObject() + } else { + serialize(element).map { elementGenerator => - // needed so Jackson's JSON Generator won't complain - gen.writeStartObject() + // needed so Jackson's JSON Generator won't complain + 
gen.writeStartObject() - elementGenerator(gen) + elementGenerator(gen) - // needed so Jackson's JSON Generator won't complain - gen.writeEndObject() - } + // needed so Jackson's JSON Generator won't complain + gen.writeEndObject() } } }.map(_ => stripStartObjectEndObject(writer.toString)) // strip the extras we added for the JSON generator (so it won't complain) } - - jsonIO - .redeem(_.asLeft.leftMap(Seq(_)), _.asRight) - .unsafeRunSync() - } + }.flatten.toEither.leftMap(Seq(_)) def serializeAsText(node: Node): Either[Seq[Throwable], String] = { val properties = new Properties() @@ -226,22 +218,15 @@ object Serializer { } def serialize(node: Node, properties: Properties): Either[Seq[Throwable], String] = { - - val serializationIO = Resource.make(IO {serializerPool.borrowObject(classOf[SAXSerializer]).asInstanceOf[SAXSerializer]})(serializer => IO {serializerPool.returnObject()}).use { serializer => - Resource.fromAutoCloseable(IO {new StringWriter()}).use { writer => - IO { - serializer.setOutput(writer, properties) - - val transformer = transformerFactory.newTransformer() - val result = new SAXResult(serializer) - transformer.transform(new DOMSource(node), result) - writer.toString - } + With(serializerPool.borrowObject(classOf[SAXSerializer]).asInstanceOf[SAXSerializer])(serializer => serializerPool.returnObject(serializer)) { serializer => + With(new StringWriter()) { writer => + serializer.setOutput(writer, properties) + + val transformer = transformerFactory.newTransformer() + val result = new SAXResult(serializer) + transformer.transform(new DOMSource(node), result) + writer.toString } - } - - serializationIO - .redeem(_.asLeft.leftMap(Seq(_)), _.asRight) - .unsafeRunSync() + }.flatten.toEither.leftMap(Seq(_)) } } diff --git a/src/main/scala/org/humanistika/exist/index/algolia/With.scala b/src/main/scala/org/humanistika/exist/index/algolia/With.scala new file mode 100644 index 0000000..f2af52a --- /dev/null +++ 
b/src/main/scala/org/humanistika/exist/index/algolia/With.scala @@ -0,0 +1,25 @@ +package org.humanistika.exist.index.algolia + +import scala.util.control.NonFatal +import scala.util.{Failure, Success, Try} + +// TODO(AR) replace with `Using` in Scala 2.13 +object With { + def apply[R, A](acquire: => R)(release: R => Unit)(f: R => A): Try[A] = { + // acquire inside Try so acquisition failures become Failure (matches the replaced Resource/IO `.attempt` semantics) + Try(acquire).flatMap { r => + try { + Success(f(r)) + } catch { + // NonFatal only: never swallow OutOfMemoryError, InterruptedException, etc. + case NonFatal(t) => + Failure(t) + } finally { + release(r) + } + } + } + + def apply[R <: AutoCloseable, A](acquire: => R)(f: R => A): Try[A] = + apply(acquire)(_.close())(f) +} diff --git a/src/main/scala/org/humanistika/exist/index/algolia/backend/IndexLocalStoreManagerActor.scala b/src/main/scala/org/humanistika/exist/index/algolia/backend/IndexLocalStoreManagerActor.scala index f9f8355..558bba9 100644 --- a/src/main/scala/org/humanistika/exist/index/algolia/backend/IndexLocalStoreManagerActor.scala +++ b/src/main/scala/org/humanistika/exist/index/algolia/backend/IndexLocalStoreManagerActor.scala @@ -36,8 +36,6 @@ import org.humanistika.exist.index.algolia.backend.IndexLocalStoreActor.FILE_SUF import scala.collection.JavaConverters._ import scala.concurrent.{Await, Future} import scala.util.{Failure, Success, Try} -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp import cats.syntax.either._ import grizzled.slf4j.Logger import org.apache.commons.codec.binary.Base32 @@ -179,23 +177,13 @@ class IndexLocalStoreActor(indexesDir: Path, indexName: String) extends Actor { this.perDocumentActors = Map.empty // find the latest timestamp dir for each document id - val fileIO = Resource.fromAutoCloseable(IO { Files.list(localIndexStoreDir)}).use { indexDirStream => - IO { + val latestTimestampDirs: Seq[Path] = With(Files.list(localIndexStoreDir)) { indexDirStream => indexDirStream .filter(Files.isDirectory(_))
.collect(Collectors.toList()) .asScala .map(getLatestTimestampDir(_, None)) - } - } - val latestTimestampDirs: Seq[Path] = fileIO - .redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(timestampDirs) => - timestampDirs.flatten - case Left(t) => - throw t //TODO(AR) better error messages - } + }.get.flatten //delete any rootObjects from the latest timestamps which match the collection path tree val rootObjectsInCollectionTree = latestTimestampDirs.map(rootObjectsByCollectionTree(_, collectionPath)).flatten @@ -225,20 +213,9 @@ class IndexLocalStoreActor(indexesDir: Path, indexName: String) extends Actor { } private def isEmpty(dir: Path) : Boolean = { - val fileIO = Resource.fromAutoCloseable(IO { Files.list(dir)}).use { stream => - IO { + With(Files.list(dir)) { stream => !stream.findFirst().isPresent - } - } - - fileIO - .redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(result) => - result - case Left(t) => - throw t //TODO(AR) better error messsages - } + }.get } /** @@ -253,24 +230,14 @@ class IndexLocalStoreActor(indexesDir: Path, indexName: String) extends Actor { rootObjectCollectionPath.exists(_.startsWith(collectionPath)) } - val fileIO = Resource.fromAutoCloseable(IO { Files.list(timestampDir)}).use { timestampDirStream => - IO { - timestampDirStream - .filter(Files.isRegularFile(_)) - .filter(FileUtils.fileName(_).endsWith(s".$FILE_SUFFIX")) - .filter(matchesCollectionPathRoot) - .collect(Collectors.toList()) - .asScala - } - } - - fileIO - .redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(rootObjectsMatchingCollection) => - rootObjectsMatchingCollection - case Left(t) => throw t //TODO(AR) better error messages - } + With(Files.list(timestampDir)) { timestampDirStream => + timestampDirStream + .filter(Files.isRegularFile(_)) + .filter(FileUtils.fileName(_).endsWith(s".$FILE_SUFFIX")) + .filter(matchesCollectionPathRoot) + .collect(Collectors.toList()) + .asScala + }.get } private def 
getOrCreatePerDocumentActor(documentId: DocumentId) : ActorRef = perDocumentActors.getOrElse(documentId, createPerDocumentActor(documentId)) @@ -299,26 +266,14 @@ object IndexLocalStoreDocumentActor { def getLatestTimestampDir(dir: Path, lt: Option[Timestamp] = None): Option[Path] = { def timestampFromPath(p: Path): Timestamp = p.getFileName.toString.toLong - val fileIO = Resource.fromAutoCloseable(IO { Files.list(dir)}).use { stream => - IO { - stream - .filter(Files.isDirectory(_)) - .filter(dir => lt.map(timestamp => timestampFromPath(dir) < timestamp).getOrElse(true)) - .collect(Collectors.toList()).asScala - } - } - - fileIO - .redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(prevTimestamps) => - prevTimestamps - .sortWith{ case (p1, p2) => timestampFromPath(p1) > timestampFromPath(p2)} - .headOption - - case Left(t) => - throw t //TODO(AR) better error reporting - } + With(Files.list(dir)) { stream => + stream + .filter(Files.isDirectory(_)) + .filter(dir => lt.map(timestamp => timestampFromPath(dir) < timestamp).getOrElse(true)) + .collect(Collectors.toList()).asScala + }.get + .sortWith { case (p1, p2) => timestampFromPath(p1) > timestampFromPath(p2) } + .headOption } } @@ -341,16 +296,14 @@ class IndexLocalStoreDocumentActor(indexDir: Path, documentId: DocumentId) exten val nodeIdFilename = filenameUsableNodeId(indexableRootObject.userSpecifiedNodeId, indexableRootObject.nodeId) val file = dir.resolve(s"${nodeIdFilename}.$FILE_SUFFIX") - Resource.fromAutoCloseable(IO {Files.newBufferedWriter(file)}).use { writer => - IO { + With(Files.newBufferedWriter(file)) { writer => writer.write(serializeJson(indexableRootObject)) - } - }.redeem(_.asLeft, _.asRight).unsafeRunSync() match { - case Right(_) => - if(logger.isTraceEnabled) { + } match { + case Success(_) => + if (logger.isTraceEnabled) { logger.trace(s"Stored JSON rootObject '${file}' for (collectionPath=${indexableRootObject.collectionPath}, docId=${indexableRootObject.documentId}, 
userSpecificDocId=${indexableRootObject.userSpecifiedDocumentId}, nodeId=${indexableRootObject.nodeId}, userSpecificNodeId=${indexableRootObject.userSpecifiedNodeId}): ${indexDir.getFileName}") } - case Left(t) => throw t //TODO(AR) do some better error handling + case Failure(t) => throw t //TODO(AR) do some better error handling } case FindChanges(timestamp, userSpecifiedDocumentId, documentId) => @@ -473,14 +426,11 @@ class IndexLocalStoreDocumentActor(indexDir: Path, documentId: DocumentId) exten } private def listFiles(dir: Path) : Either[Seq[Throwable], Seq[Path]] = { - Resource.fromAutoCloseable(IO { Files.list(dir)}).use { stream => - IO { - stream - .filter(Files.isRegularFile(_)) - .collect(Collectors.toList()).asScala - } - }.redeem(_.asLeft.leftMap(Seq(_)), _.asRight) - .unsafeRunSync() + With(Files.list(dir)) { stream => + stream + .filter(Files.isRegularFile(_)) + .collect(Collectors.toList()).asScala + }.toEither.leftMap(Seq(_)) } private def findPreviousDir(timestampDir: Path): Option[Path] = { @@ -501,21 +451,10 @@ class IndexLocalStoreDocumentActor(indexDir: Path, documentId: DocumentId) exten private def filenameUsableNodeId(userSpecifiedNodeId: Option[String], nodeId: Option[String]) = userSpecifiedNodeId.map(base32Encode).getOrElse(nodeId.getOrElse(DOCUMENT_NODE_ID)) private def serializeJson(indexableRootObject: IndexableRootObject): String = { - val serializeIO = Resource.fromAutoCloseable(IO { new StringWriter()}).use { writer => - IO { + With(new StringWriter()) { writer => mapper.writeValue(writer, indexableRootObject) writer.toString - } - } - - serializeIO - .redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(result) => - result - case Left(t) => - throw t - } + }.get } private def checksum(file: Path): Either[Throwable, Array[Byte]] = Checksum.checksum(file, Checksum.MD5) diff --git a/src/test/scala/org/humanistika/exist/index/algolia/DOMHelper.scala b/src/test/scala/org/humanistika/exist/index/algolia/DOMHelper.scala 
index 3f0ccd2..8a41617 100644 --- a/src/test/scala/org/humanistika/exist/index/algolia/DOMHelper.scala +++ b/src/test/scala/org/humanistika/exist/index/algolia/DOMHelper.scala @@ -2,15 +2,10 @@ package org.humanistika.exist.index.algolia import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets - -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp -import cats.syntax.either._ import javax.xml.parsers.DocumentBuilderFactory import org.w3c.dom.{Attr, Document, Element, Node} import scala.annotation.tailrec -import scala.util.{Failure, Success} object DOMHelper { @@ -19,16 +14,9 @@ object DOMHelper { def dom(xml: String) : Document = { val documentBuilder = documentBuilderFactory.newDocumentBuilder() - Resource.fromAutoCloseable(IO {new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))}).use { is => - IO { + With(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))) { is => documentBuilder.parse(is) - } - }.redeem(_.asLeft, _.asRight).unsafeRunSync() match { - case Right(s) => - s - case Left(t) => - throw t - } + }.get } def attr(node: Node, name: String) : Attr = { diff --git a/src/test/scala/org/humanistika/exist/index/algolia/IndexableRootObjectJsonSerializerSpec.scala b/src/test/scala/org/humanistika/exist/index/algolia/IndexableRootObjectJsonSerializerSpec.scala index 14864f3..c50dc6d 100644 --- a/src/test/scala/org/humanistika/exist/index/algolia/IndexableRootObjectJsonSerializerSpec.scala +++ b/src/test/scala/org/humanistika/exist/index/algolia/IndexableRootObjectJsonSerializerSpec.scala @@ -1,21 +1,14 @@ package org.humanistika.exist.index.algolia import DOMHelper._ -import java.io.{ByteArrayInputStream, StringWriter} -import java.nio.charset.StandardCharsets +import java.io.StringWriter import javax.xml.namespace.QName -import javax.xml.parsers.DocumentBuilderFactory import com.fasterxml.jackson.databind.ObjectMapper import 
org.humanistika.exist.index.algolia.Serializer.{serializeElementForAttribute, serializeElementForObject} import org.specs2.Specification -import org.w3c.dom.{Attr, Document, Element, Node} -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp import cats.syntax.either._ -import scala.util.{Failure, Success} - class IndexableRootObjectJsonSerializerSpec extends Specification { def is = s2""" This is a specification to check the JSON Serialization of IndexableRootObject @@ -322,18 +315,10 @@ class IndexableRootObjectJsonSerializerSpec extends Specification { def is = s2" } private def serializeJson(indexableRootObject: IndexableRootObject): String = { - Resource.fromAutoCloseable(IO {new StringWriter()}).use { writer => - IO { + With(new StringWriter()) { writer => val mapper = new ObjectMapper mapper.writeValue(writer, indexableRootObject) writer.toString - } - }.redeem(_.asLeft, _.asRight) - .unsafeRunSync() match { - case Right(s) => - s - case Left(t) => - throw t - } + }.get } } diff --git a/src/test/scala/org/humanistika/exist/index/algolia/LocalIndexableRootObjectJsonSerializerSpec.scala b/src/test/scala/org/humanistika/exist/index/algolia/LocalIndexableRootObjectJsonSerializerSpec.scala index 1f9b04d..5daec5b 100644 --- a/src/test/scala/org/humanistika/exist/index/algolia/LocalIndexableRootObjectJsonSerializerSpec.scala +++ b/src/test/scala/org/humanistika/exist/index/algolia/LocalIndexableRootObjectJsonSerializerSpec.scala @@ -8,10 +8,6 @@ import com.fasterxml.jackson.databind.ObjectMapper import org.specs2.Specification import java.nio.charset.StandardCharsets.UTF_8 -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp -import cats.syntax.either._ - class LocalIndexableRootObjectJsonSerializerSpec extends Specification { def is = s2""" This is a specification to check the JSON Serialization of IndexableRootObject @@ 
-35,30 +31,17 @@ class LocalIndexableRootObjectJsonSerializerSpec extends Specification { def is private def createTempJsonFile(json: String) : Path = { val p = Files.createTempFile("test", "json") - Resource.fromAutoCloseable(IO {Files.newBufferedWriter(p, UTF_8)}).use { writer => - IO { + With(Files.newBufferedWriter(p, UTF_8)) { writer => writer.write(json) - } - }.redeem(_.asLeft, _.asRight).unsafeRunSync() match { - case Right(_) => p - case Left(t) => - throw t - } + }.get } private def serializeJson[T](obj: T): String = { - Resource.fromAutoCloseable(IO {new StringWriter}).use { writer => - IO { + With(new StringWriter) { writer => val mapper = new ObjectMapper mapper.writeValue(writer, obj) writer.toString - } - }.redeem(_.asLeft, _.asRight).unsafeRunSync() match { - case Right(s) => - s - case Left(t) => - throw t - } + }.get } } diff --git a/src/test/scala/org/humanistika/exist/index/algolia/SerializerSpec.scala b/src/test/scala/org/humanistika/exist/index/algolia/SerializerSpec.scala index 31f60a8..ea402f0 100644 --- a/src/test/scala/org/humanistika/exist/index/algolia/SerializerSpec.scala +++ b/src/test/scala/org/humanistika/exist/index/algolia/SerializerSpec.scala @@ -1,13 +1,8 @@ package org.humanistika.exist.index.algolia import Serializer.{serializeElementForAttribute, serializeElementForObject} -import DOMHelper._ import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets - -import cats.effect.{IO, Resource} -import cats.effect.unsafe.implicits.global // TODO(AR) switch to using cats.effect.IOApp -import cats.syntax.either._ import javax.xml.parsers.DocumentBuilderFactory import org.specs2.Specification import org.w3c.dom.{Document, Element, Node} @@ -84,16 +79,9 @@ class SerializerSpec extends Specification { def is = s2""" private lazy val documentBuilderFactory = DocumentBuilderFactory.newInstance() private def dom(xml: String) : Document = { val documentBuilder = documentBuilderFactory.newDocumentBuilder() - 
Resource.fromAutoCloseable(IO {new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))}).use { is => - IO { + With(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))) { is => documentBuilder.parse(is) - } - }.redeem(_.asLeft, _.asRight).unsafeRunSync() match { - case Right(s) => - s - case Left(t) => - throw t - } + }.get } private def elem(node: Node, name: String) : Element = {