From cacbb4cd8a2b7b63a12e0a937056bfe03df6b108 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?=
 <bjorn.erik.pedersen@gmail.com>
Date: Wed, 2 Jan 2019 12:33:26 +0100
Subject: [PATCH] Make Page an interface

The main motivation of this commit is to add a `page.Page` interface to replace the very file-oriented `hugolib.Page` struct.
This is all a preparation step for issue #5074, "pages from other data sources".

But this also fixes a set of annoying limitations, especially related to custom output formats, and shortcodes.

Most notable changes:

* The inner content of shortcodes using the `{{%` as the outer-most delimiter will now be sent to the content renderer, e.g. Blackfriday.
  This means that any markdown will partake in the global ToC and footnote context etc.
* The Custom Output formats are now "fully virtualized". This removes many of the current limitations.
* The taxonomy list type now has a reference to the `Page` object.
  This improves the taxonomy template `.Title` situation and makes common template constructs much simpler.

And it's faster.

See #5074
Fixes #5763
Fixes #5758
Fixes #5090
Fixes #5204
Fixes #4695
Fixes #5607
Fixes #5707
Fixes #5719
Fixes #3113
Fixes #5706
Fixes #5767
Fixes #5723
Fixes #5769
Fixes #5770
Fixes #5771
Fixes #5759
Fixes #5776
---
 benchbep.sh                                   |    2 +
 codegen/methods.go                            |  529 ++++
 .../methods2_test.go                          |   17 +-
 codegen/methods_test.go                       |  100 +
 commands/commandeer.go                        |    9 +-
 commands/commands.go                          |   20 +-
 commands/commands_test.go                     |   21 +-
 commands/convert.go                           |   22 +-
 commands/hugo.go                              |  176 +-
 commands/import_jekyll.go                     |   10 +-
 commands/list.go                              |   14 +-
 commands/new_content_test.go                  |   12 +-
 commands/server.go                            |   10 +-
 common/collections/append.go                  |    6 +-
 common/collections/slice_test.go              |    6 +-
 common/hugio/readers.go                       |    3 +-
 common/maps/scratch.go                        |   20 +-
 common/types/types.go                         |   24 +-
 config/configProvider.go                      |   14 +-
 config/services/servicesConfig.go             |   14 +-
 {hugolib => config}/sitemap.go                |   15 +-
 create/content.go                             |    8 +-
 deps/deps.go                                  |    7 +-
 docs/content/en/variables/page.md             |   15 +-
 go.mod                                        |    1 -
 go.sum                                        |    2 -
 helpers/content.go                            |    8 +-
 helpers/content_renderer_test.go              |    6 +-
 helpers/content_test.go                       |   24 +-
 helpers/general.go                            |   11 +-
 helpers/general_test.go                       |   25 +-
 helpers/path.go                               |    9 +-
 helpers/pygments.go                           |    4 +-
 htesting/test_structs.go                      |   53 +-
 hugofs/createcounting_fs.go                   |   99 +
 hugofs/fs.go                                  |    8 +-
 hugofs/hashing_fs.go                          |    6 +-
 hugofs/stacktracer_fs.go                      |   70 +
 hugolib/alias.go                              |   29 +-
 hugolib/alias_test.go                         |    4 +-
 hugolib/collections.go                        |   75 +-
 hugolib/collections_test.go                   |   18 +-
 hugolib/config.go                             |   23 +-
 hugolib/datafiles_test.go                     |    6 +-
 hugolib/disableKinds_test.go                  |   40 +-
 hugolib/embedded_shortcodes_test.go           |   10 +-
 hugolib/gitinfo.go                            |   14 +-
 hugolib/hugo_sites.go                         |  520 ++--
 hugolib/hugo_sites_build.go                   |  175 +-
 hugolib/hugo_sites_build_errors_test.go       |  123 +-
 hugolib/hugo_sites_build_test.go              |  418 +--
 hugolib/hugo_sites_multihost_test.go          |   26 +-
 hugolib/hugo_smoke_test.go                    |  303 +++
 hugolib/language_content_dir_test.go          |   78 +-
 hugolib/media.go                              |   60 -
 hugolib/menu_test.go                          |    6 +-
 hugolib/minify_publisher_test.go              |   20 +-
 hugolib/multilingual.go                       |   16 +-
 hugolib/orderedMap.go                         |   99 -
 hugolib/orderedMap_test.go                    |   69 -
 hugolib/page.go                               | 2397 ++++-------------
 hugolib/page__common.go                       |  112 +
 hugolib/page__data.go                         |   70 +
 hugolib/page__menus.go                        |   74 +
 hugolib/page__meta.go                         |  652 +++++
 hugolib/page__new.go                          |  291 ++
 hugolib/page__output.go                       |  107 +
 hugolib/page__paths.go                        |  148 +
 hugolib/page__per_output.go                   |  445 +++
 hugolib/{page_ref.go => page__ref.go}         |   61 +-
 hugolib/page__tree.go                         |  113 +
 hugolib/page_content.go                       |  248 +-
 hugolib/page_errors.go                        |   47 -
 hugolib/page_kinds.go                         |   40 +
 hugolib/page_output.go                        |  320 ---
 hugolib/page_pagination.go                    |   83 +
 hugolib/page_paths.go                         |  312 ---
 hugolib/page_paths_test.go                    |  194 --
 hugolib/page_permalink_test.go                |   44 +-
 hugolib/page_position.go                      |   76 +
 hugolib/page_taxonomy_test.go                 |   96 -
 hugolib/page_test.go                          |  901 ++-----
 hugolib/page_time_integration_test.go         |  183 --
 hugolib/page_unwrap.go                        |   50 +
 ...separators_test.go => page_unwrap_test.go} |   31 +-
 hugolib/page_without_content.go               |   67 -
 hugolib/pagebundler.go                        |   41 +-
 hugolib/pagebundler_capture.go                |   10 +-
 hugolib/pagebundler_capture_test.go           |   12 +-
 hugolib/pagebundler_handlers.go               |  141 +-
 hugolib/pagebundler_test.go                   |  273 +-
 hugolib/pagecollections.go                    |  279 +-
 hugolib/pagecollections_test.go               |   86 +-
 hugolib/pages_language_merge_test.go          |   40 +-
 hugolib/pages_related_test.go                 |   75 -
 hugolib/pagination_test.go                    |  579 ----
 hugolib/paths/themes.go                       |    4 +-
 hugolib/permalinker.go                        |    5 +-
 hugolib/permalinks.go                         |  213 --
 hugolib/permalinks_test.go                    |   85 -
 hugolib/resource_chain_test.go                |    8 +-
 hugolib/rss_test.go                           |    4 +-
 hugolib/shortcode.go                          |  490 +---
 hugolib/shortcode_page.go                     |   56 +
 hugolib/shortcode_test.go                     |  402 ++-
 hugolib/site.go                               | 1112 ++++----
 hugolib/siteJSONEncode_test.go                |   34 +-
 hugolib/site_output.go                        |   15 +-
 hugolib/site_output_test.go                   |   42 +-
 hugolib/site_render.go                        |  446 ++-
 hugolib/site_sections.go                      |  273 +-
 hugolib/site_sections_test.go                 |  150 +-
 hugolib/site_test.go                          |  123 +-
 hugolib/site_url_test.go                      |   14 +-
 hugolib/sitemap_test.go                       |   10 +-
 hugolib/taxonomy.go                           |  151 +-
 hugolib/taxonomy_test.go                      |  161 +-
 hugolib/testhelpers_test.go                   |  123 +-
 hugolib/translations.go                       |   56 +-
 langs/language.go                             |   16 +-
 lazy/init.go                                  |  199 ++
 lazy/init_test.go                             |  150 ++
 lazy/once.go                                  |   69 +
 magefile.go                                   |   35 +-
 media/mediaType.go                            |    9 +-
 media/mediaType_test.go                       |    4 +-
 {hugolib => navigation}/menu.go               |   41 +-
 navigation/pagemenus.go                       |  240 ++
 output/outputFormat.go                        |   35 +-
 output/outputFormat_test.go                   |    5 +-
 parser/pageparser/itemtype_string.go          |   16 +
 parser/pageparser/pageparser.go               |   27 +-
 parser/pageparser/pageparser_test.go          |    6 +-
 publisher/publisher.go                        |    4 +-
 related/inverted_index.go                     |   25 +-
 related/inverted_index_test.go                |   42 +-
 resources/image.go                            |   15 +-
 resources/image_cache.go                      |   20 +-
 resources/page/page.go                        |  365 +++
 .../page/page_author.go                       |    4 +-
 resources/page/page_data.go                   |   42 +
 resources/page/page_data_test.go              |   57 +
 resources/page/page_generate/.gitignore       |    1 +
 .../page_generate/generate_page_wrappers.go   |  212 ++
 .../site.go => resources/page/page_kinds.go   |   21 +-
 resources/page/page_kinds_test.go             |   31 +
 resources/page/page_marshaljson.autogen.go    |  198 ++
 resources/page/page_nop.go                    |  463 ++++
 resources/page/page_outputformat.go           |   85 +
 resources/page/page_paths.go                  |  334 +++
 resources/page/page_paths_test.go             |  258 ++
 resources/page/page_wrappers.autogen.go       |   97 +
 .../page/pagegroup.go                         |  135 +-
 .../page/pagegroup_test.go                    |   96 +-
 .../page}/pagemeta/page_frontmatter.go        |   15 +-
 .../page}/pagemeta/page_frontmatter_test.go   |   41 +-
 .../page}/pagemeta/pagemeta.go                |   13 +-
 resources/page/pages.go                       |  115 +
 .../page/pages_cache.go                       |    4 +-
 .../page/pages_cache_test.go                  |   14 +-
 .../page}/pages_language_merge.go             |    6 +-
 .../page/pages_prev_next.go                   |   12 +-
 .../page/pages_prev_next_test.go              |   19 +-
 {hugolib => resources/page}/pages_related.go  |   52 +-
 resources/page/pages_related_test.go          |   86 +
 .../page/pages_sort.go                        |  106 +-
 .../page/pages_sort_test.go                   |  116 +-
 {hugolib => resources/page}/pagination.go     |  267 +-
 resources/page/pagination_test.go             |  307 +++
 resources/page/permalinks.go                  |  248 ++
 resources/page/permalinks_test.go             |  180 ++
 resources/page/site.go                        |   53 +
 resources/page/testhelpers_test.go            |  554 ++++
 resources/page/weighted.go                    |  140 +
 resources/resource.go                         |   63 +-
 resources/resource/dates.go                   |   81 +
 resources/resource/params.go                  |   89 +
 resources/resource/resource_helpers.go        |   70 +
 resources/resource/resourcetypes.go           |   98 +-
 resources/resource_metadata.go                |    1 -
 resources/resource_metadata_test.go           |    4 +-
 resources/resource_test.go                    |    6 +-
 resources/testhelpers_test.go                 |   19 +-
 resources/transform.go                        |    4 +-
 source/fileInfo.go                            |   52 +-
 tpl/collections/apply_test.go                 |    6 +-
 tpl/collections/collections.go                |    8 +-
 tpl/collections/collections_test.go           |   38 +-
 tpl/template.go                               |   43 +-
 tpl/template_info.go                          |   35 +
 tpl/tplimpl/ace.go                            |   15 +-
 tpl/tplimpl/embedded/generate/generate.go     |    4 +-
 tpl/tplimpl/embedded/templates.autogen.go     |   24 +-
 .../embedded/templates/_default/rss.xml       |    8 +-
 .../embedded/templates/_default/sitemap.xml   |    5 +-
 .../templates/_default/sitemapindex.xml       |    1 +
 tpl/tplimpl/embedded/templates/disqus.html    |    2 +-
 tpl/tplimpl/shortcodes.go                     |  148 +
 tpl/tplimpl/shortcodes_test.go                |   94 +
 tpl/tplimpl/template.go                       |  201 +-
 tpl/tplimpl/templateFuncster.go               |   46 +-
 tpl/tplimpl/template_ast_transformers.go      |  132 +-
 tpl/tplimpl/template_ast_transformers_test.go |   66 +-
 tpl/tplimpl/template_funcs_test.go            |   20 +-
 ...template_test.go => template_info_test.go} |   48 +-
 205 files changed, 13977 insertions(+), 9366 deletions(-)
 create mode 100755 benchbep.sh
 create mode 100644 codegen/methods.go
 rename hugolib/page_resource.go => codegen/methods2_test.go (70%)
 create mode 100644 codegen/methods_test.go
 rename {hugolib => config}/sitemap.go (73%)
 create mode 100644 hugofs/createcounting_fs.go
 create mode 100644 hugofs/stacktracer_fs.go
 create mode 100644 hugolib/hugo_smoke_test.go
 delete mode 100644 hugolib/media.go
 delete mode 100644 hugolib/orderedMap.go
 delete mode 100644 hugolib/orderedMap_test.go
 create mode 100644 hugolib/page__common.go
 create mode 100644 hugolib/page__data.go
 create mode 100644 hugolib/page__menus.go
 create mode 100644 hugolib/page__meta.go
 create mode 100644 hugolib/page__new.go
 create mode 100644 hugolib/page__output.go
 create mode 100644 hugolib/page__paths.go
 create mode 100644 hugolib/page__per_output.go
 rename hugolib/{page_ref.go => page__ref.go} (56%)
 create mode 100644 hugolib/page__tree.go
 delete mode 100644 hugolib/page_errors.go
 create mode 100644 hugolib/page_kinds.go
 delete mode 100644 hugolib/page_output.go
 create mode 100644 hugolib/page_pagination.go
 delete mode 100644 hugolib/page_paths.go
 delete mode 100644 hugolib/page_paths_test.go
 create mode 100644 hugolib/page_position.go
 delete mode 100644 hugolib/page_taxonomy_test.go
 delete mode 100644 hugolib/page_time_integration_test.go
 create mode 100644 hugolib/page_unwrap.go
 rename hugolib/{path_separators_test.go => page_unwrap_test.go} (57%)
 delete mode 100644 hugolib/page_without_content.go
 delete mode 100644 hugolib/pages_related_test.go
 delete mode 100644 hugolib/pagination_test.go
 delete mode 100644 hugolib/permalinks.go
 delete mode 100644 hugolib/permalinks_test.go
 create mode 100644 hugolib/shortcode_page.go
 create mode 100644 lazy/init.go
 create mode 100644 lazy/init_test.go
 create mode 100644 lazy/once.go
 rename {hugolib => navigation}/menu.go (89%)
 create mode 100644 navigation/pagemenus.go
 create mode 100644 parser/pageparser/itemtype_string.go
 create mode 100644 resources/page/page.go
 rename hugolib/author.go => resources/page/page_author.go (94%)
 create mode 100644 resources/page/page_data.go
 create mode 100644 resources/page/page_data_test.go
 create mode 100644 resources/page/page_generate/.gitignore
 create mode 100644 resources/page/page_generate/generate_page_wrappers.go
 rename common/hugo/site.go => resources/page/page_kinds.go (61%)
 create mode 100644 resources/page/page_kinds_test.go
 create mode 100644 resources/page/page_marshaljson.autogen.go
 create mode 100644 resources/page/page_nop.go
 create mode 100644 resources/page/page_outputformat.go
 create mode 100644 resources/page/page_paths.go
 create mode 100644 resources/page/page_paths_test.go
 create mode 100644 resources/page/page_wrappers.autogen.go
 rename hugolib/pageGroup.go => resources/page/pagegroup.go (71%)
 rename hugolib/pageGroup_test.go => resources/page/pagegroup_test.go (83%)
 rename {hugolib => resources/page}/pagemeta/page_frontmatter.go (97%)
 rename {hugolib => resources/page}/pagemeta/page_frontmatter_test.go (88%)
 rename {hugolib => resources/page}/pagemeta/pagemeta.go (77%)
 create mode 100644 resources/page/pages.go
 rename hugolib/pageCache.go => resources/page/pages_cache.go (97%)
 rename hugolib/pageCache_test.go => resources/page/pages_cache_test.go (85%)
 rename {hugolib => resources/page}/pages_language_merge.go (94%)
 rename hugolib/pagesPrevNext.go => resources/page/pages_prev_next.go (77%)
 rename hugolib/pagesPrevNext_test.go => resources/page/pages_prev_next_test.go (85%)
 rename {hugolib => resources/page}/pages_related.go (81%)
 create mode 100644 resources/page/pages_related_test.go
 rename hugolib/pageSort.go => resources/page/pages_sort.go (77%)
 rename hugolib/pageSort_test.go => resources/page/pages_sort_test.go (72%)
 rename {hugolib => resources/page}/pagination.go (56%)
 create mode 100644 resources/page/pagination_test.go
 create mode 100644 resources/page/permalinks.go
 create mode 100644 resources/page/permalinks_test.go
 create mode 100644 resources/page/site.go
 create mode 100644 resources/page/testhelpers_test.go
 create mode 100644 resources/page/weighted.go
 create mode 100644 resources/resource/dates.go
 create mode 100644 resources/resource/params.go
 create mode 100644 resources/resource/resource_helpers.go
 create mode 100644 tpl/template_info.go
 create mode 100644 tpl/tplimpl/shortcodes.go
 create mode 100644 tpl/tplimpl/shortcodes_test.go
 rename tpl/tplimpl/{template_test.go => template_info_test.go} (55%)

diff --git a/benchbep.sh b/benchbep.sh
new file mode 100755
index 00000000000..e94cc4e6308
--- /dev/null
+++ b/benchbep.sh
@@ -0,0 +1,2 @@
+gobench -package=./hugolib -bench="BenchmarkSiteBuilding/TOML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench
+benchcmp -best 0.bench 1.bench
\ No newline at end of file
diff --git a/codegen/methods.go b/codegen/methods.go
new file mode 100644
index 00000000000..007384f9b62
--- /dev/null
+++ b/codegen/methods.go
@@ -0,0 +1,529 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+// Some functions in this file (see comments) are based on the Go source code,
+// copyright The Go Authors and governed by a BSD-style license.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package codegen contains helpers for code generation.
+package codegen
+
+import (
+	"fmt"
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"os"
+	"path"
+	"path/filepath"
+	"reflect"
+	"regexp"
+	"sort"
+	"strings"
+	"sync"
+)
+
+// Make room for insertions
+const weightWidth = 1000
+
+// NewInspector creates a new Inspector given a source root.
+func NewInspector(root string) *Inspector {
+	return &Inspector{ProjectRootDir: root}
+}
+
+// Inspector provides methods to help code generation. It uses a combination
+// of reflection and source code AST to do the heavy lifting.
+type Inspector struct {
+	ProjectRootDir string
+
+	init sync.Once
+
+	// Determines method order. Go's reflect sorts lexicographically, so
+	// we must parse the source to preserve this order.
+	methodWeight map[string]map[string]int
+}
+
+// MethodsFromTypes creates a method set from the include slice, excluding any
+// method in exclude.
+func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.Type) Methods {
+	c.parseSource()
+
+	var methods Methods
+
+	var excludes = make(map[string]bool)
+
+	if len(exclude) > 0 {
+		for _, m := range c.MethodsFromTypes(exclude, nil) {
+			excludes[m.Name] = true
+		}
+	}
+
+	// There may be overlapping interfaces in types. Do a simple check for now.
+	seen := make(map[string]bool)
+
+	nameAndPackage := func(t reflect.Type) (string, string) {
+		var name, pkg string
+
+		isPointer := t.Kind() == reflect.Ptr
+
+		if isPointer {
+			t = t.Elem()
+		}
+
+		pkgPrefix := ""
+		if pkgPath := t.PkgPath(); pkgPath != "" {
+			pkgPath = strings.TrimSuffix(pkgPath, "/")
+			_, shortPath := path.Split(pkgPath)
+			pkgPrefix = shortPath + "."
+			pkg = pkgPath
+		}
+
+		name = t.Name()
+		if name == "" {
+			// interface{}
+			name = t.String()
+		}
+
+		if isPointer {
+			pkgPrefix = "*" + pkgPrefix
+		}
+
+		name = pkgPrefix + name
+
+		return name, pkg
+
+	}
+
+	for _, t := range include {
+
+		for i := 0; i < t.NumMethod(); i++ {
+
+			m := t.Method(i)
+			if excludes[m.Name] || seen[m.Name] {
+				continue
+			}
+
+			seen[m.Name] = true
+
+			if m.PkgPath != "" {
+				// Not exported
+				continue
+			}
+
+			numIn := m.Type.NumIn()
+
+			ownerName, _ := nameAndPackage(t)
+
+			method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
+
+			for i := 0; i < numIn; i++ {
+				in := m.Type.In(i)
+
+				name, pkg := nameAndPackage(in)
+
+				if pkg != "" {
+					method.Imports = append(method.Imports, pkg)
+				}
+
+				method.In = append(method.In, name)
+			}
+
+			numOut := m.Type.NumOut()
+
+			if numOut > 0 {
+				for i := 0; i < numOut; i++ {
+					out := m.Type.Out(i)
+					name, pkg := nameAndPackage(out)
+
+					if pkg != "" {
+						method.Imports = append(method.Imports, pkg)
+					}
+
+					method.Out = append(method.Out, name)
+				}
+			}
+
+			methods = append(methods, method)
+		}
+
+	}
+
+	sort.SliceStable(methods, func(i, j int) bool {
+		mi, mj := methods[i], methods[j]
+
+		wi := c.methodWeight[mi.OwnerName][mi.Name]
+		wj := c.methodWeight[mj.OwnerName][mj.Name]
+
+		if wi == wj {
+			return mi.Name < mj.Name
+		}
+
+		return wi < wj
+
+	})
+
+	return methods
+
+}
+
+func (c *Inspector) parseSource() {
+	c.init.Do(func() {
+
+		if !strings.Contains(c.ProjectRootDir, "hugo") {
+			panic("dir must be set to the Hugo root")
+		}
+
+		c.methodWeight = make(map[string]map[string]int)
+		dirExcludes := regexp.MustCompile("docs|examples")
+		fileExcludes := regexp.MustCompile("autogen")
+		var filenames []string
+
+		filepath.Walk(c.ProjectRootDir, func(path string, info os.FileInfo, err error) error {
+			if info.IsDir() {
+				if dirExcludes.MatchString(info.Name()) {
+					return filepath.SkipDir
+				}
+			}
+
+			if !strings.HasSuffix(path, ".go") || fileExcludes.MatchString(path) {
+				return nil
+			}
+
+			filenames = append(filenames, path)
+
+			return nil
+
+		})
+
+		for _, filename := range filenames {
+
+			pkg := c.packageFromPath(filename)
+
+			fset := token.NewFileSet()
+			node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
+			if err != nil {
+				panic(err)
+			}
+
+			ast.Inspect(node, func(n ast.Node) bool {
+				switch t := n.(type) {
+				case *ast.TypeSpec:
+					if t.Name.IsExported() {
+						switch it := t.Type.(type) {
+						case *ast.InterfaceType:
+							iface := pkg + "." + t.Name.Name
+							methodNames := collectMethodsRecursive(pkg, it.Methods.List)
+							weights := make(map[string]int)
+							weight := weightWidth
+							for _, name := range methodNames {
+								weights[name] = weight
+								weight += weightWidth
+							}
+							c.methodWeight[iface] = weights
+						}
+					}
+
+				}
+				return true
+			})
+
+		}
+
+		// Complement
+		for _, v1 := range c.methodWeight {
+			for k2, w := range v1 {
+				if v, found := c.methodWeight[k2]; found {
+					for k3, v3 := range v {
+						v1[k3] = (v3 / weightWidth) + w
+					}
+				}
+			}
+		}
+
+	})
+}
+
+func (c *Inspector) packageFromPath(p string) string {
+	p = filepath.ToSlash(p)
+	base := path.Base(p)
+	if !strings.Contains(base, ".") {
+		return base
+	}
+	return path.Base(strings.TrimSuffix(p, base))
+}
+
+// Method holds enough information about a method to recreate it.
+type Method struct {
+	// The interface we extracted this method from.
+	Owner reflect.Type
+
+	// String version of the above, on the form PACKAGE.NAME, e.g.
+	// page.Page
+	OwnerName string
+
+	// Method name.
+	Name string
+
+	// Imports needed to satisfy the method signature.
+	Imports []string
+
+	// Argument types, including any package prefix, e.g. string, int, interface{},
+	// net.Url
+	In []string
+
+	// Return types.
+	Out []string
+}
+
+// Declaration creates a method declaration (without any body) for the given receiver.
+func (m Method) Declaration(receiver string) string {
+	return fmt.Sprintf("func (%s %s) %s%s %s", receiverShort(receiver), receiver, m.Name, m.inStr(), m.outStr())
+}
+
+// Delegate creates a delegate call string.
+func (m Method) Delegate(receiver, delegate string) string {
+	ret := ""
+	if len(m.Out) > 0 {
+		ret = "return "
+	}
+	return fmt.Sprintf("%s%s.%s.%s%s", ret, receiverShort(receiver), delegate, m.Name, m.inOutStr())
+}
+
+func (m Method) String() string {
+	return m.Name + m.inStr() + " " + m.outStr() + "\n"
+}
+
+func (m Method) inOutStr() string {
+	if len(m.In) == 0 {
+		return "()"
+	}
+
+	args := make([]string, len(m.In))
+	for i := 0; i < len(args); i++ {
+		args[i] = fmt.Sprintf("arg%d", i)
+	}
+	return "(" + strings.Join(args, ", ") + ")"
+}
+
+func (m Method) inStr() string {
+	if len(m.In) == 0 {
+		return "()"
+	}
+
+	args := make([]string, len(m.In))
+	for i := 0; i < len(args); i++ {
+		args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
+	}
+	return "(" + strings.Join(args, ", ") + ")"
+}
+
+func (m Method) outStr() string {
+	if len(m.Out) == 0 {
+		return ""
+	}
+	if len(m.Out) == 1 {
+		return m.Out[0]
+	}
+
+	return "(" + strings.Join(m.Out, ", ") + ")"
+}
+
+// Methods represents a list of methods for one or more interfaces.
+// The order matches the defined order in their source file(s).
+type Methods []Method
+
+// Imports returns a sorted list of package imports needed to satisfy the
+// signatures of all methods.
+func (m Methods) Imports() []string {
+	var pkgImports []string
+	for _, method := range m {
+		pkgImports = append(pkgImports, method.Imports...)
+	}
+	if len(pkgImports) > 0 {
+		pkgImports = uniqueNonEmptyStrings(pkgImports)
+		sort.Strings(pkgImports)
+	}
+	return pkgImports
+}
+
+// ToMarshalJSON creates a MarshalJSON method for these methods. Any method name
+// matching any of the regexps in excludes will be ignored.
+func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
+	var sb strings.Builder
+
+	r := receiverShort(receiver)
+	what := firstToUpper(trimAsterisk(receiver))
+	pgkName := path.Base(pkgPath)
+
+	fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
+
+	var methods Methods
+	var excludeRes = make([]*regexp.Regexp, len(excludes))
+
+	for i, exclude := range excludes {
+		excludeRes[i] = regexp.MustCompile(exclude)
+	}
+
+	for _, method := range m {
+		// Exclude methods with arguments and incompatible return values
+		if len(method.In) > 0 || len(method.Out) == 0 || len(method.Out) > 2 {
+			continue
+		}
+
+		if len(method.Out) == 2 {
+			if method.Out[1] != "error" {
+				continue
+			}
+		}
+
+		for _, re := range excludeRes {
+			if re.MatchString(method.Name) {
+				continue // NOTE(review): this continues the inner excludeRes loop, not the method loop — excluded names are never skipped; needs a labeled continue or a matched flag. Confirm intent.
+			}
+		}
+
+		methods = append(methods, method)
+	}
+
+	for _, method := range methods {
+		varn := varName(method.Name)
+		if len(method.Out) == 1 {
+			fmt.Fprintf(&sb, "\t%s := %s.%s()\n", varn, r, method.Name)
+		} else {
+			fmt.Fprintf(&sb, "\t%s, err := %s.%s()\n", varn, r, method.Name)
+			fmt.Fprint(&sb, "\tif err != nil {\n\t\treturn nil, err\n\t}\n")
+		}
+	}
+
+	fmt.Fprint(&sb, "\n\ts := struct {\n")
+
+	for _, method := range methods {
+		fmt.Fprintf(&sb, "\t\t%s %s\n", method.Name, typeName(method.Out[0], pgkName))
+	}
+
+	fmt.Fprint(&sb, "\n\t}{\n")
+
+	for _, method := range methods {
+		varn := varName(method.Name)
+		fmt.Fprintf(&sb, "\t\t%s: %s,\n", method.Name, varn)
+	}
+
+	fmt.Fprint(&sb, "\n\t}\n\n")
+	fmt.Fprint(&sb, "\treturn json.Marshal(&s)\n}")
+
+	pkgImports := append(methods.Imports(), "encoding/json")
+
+	if pkgPath != "" {
+		// Exclude self
+		for i, pkgImp := range pkgImports {
+			if pkgImp == pkgPath {
+				pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
+			}
+		}
+	}
+
+	return sb.String(), pkgImports
+
+}
+
+func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
+	var methodNames []string
+	for _, m := range f {
+		if m.Names != nil {
+			methodNames = append(methodNames, m.Names[0].Name)
+			continue
+		}
+
+		if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil {
+			// Embedded interface
+			methodNames = append(
+				methodNames,
+				collectMethodsRecursive(
+					pkg,
+					ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...)
+		} else {
+			// Embedded, but in a different file/package. Return the
+			// package.Name and deal with that later.
+			name := packageName(m.Type)
+			if !strings.Contains(name, ".") {
+				// Assume current package
+				name = pkg + "." + name
+			}
+			methodNames = append(methodNames, name)
+		}
+	}
+
+	return methodNames
+
+}
+
+func firstToLower(name string) string {
+	return strings.ToLower(name[:1]) + name[1:]
+}
+
+func firstToUpper(name string) string {
+	return strings.ToUpper(name[:1]) + name[1:]
+}
+
+func packageName(e ast.Expr) string {
+	switch tp := e.(type) {
+	case *ast.Ident:
+		return tp.Name
+	case *ast.SelectorExpr:
+		return fmt.Sprintf("%s.%s", packageName(tp.X), packageName(tp.Sel))
+	}
+	return ""
+}
+
+func receiverShort(receiver string) string {
+	return strings.ToLower(trimAsterisk(receiver))[:1]
+}
+
+func trimAsterisk(name string) string {
+	return strings.TrimPrefix(name, "*")
+}
+
+func typeName(name, pkg string) string {
+	return strings.TrimPrefix(name, pkg+".")
+}
+
+func uniqueNonEmptyStrings(s []string) []string {
+	var unique []string
+	set := map[string]interface{}{}
+	for _, val := range s {
+		if val == "" {
+			continue
+		}
+		if _, ok := set[val]; !ok {
+			unique = append(unique, val)
+			set[val] = val
+		}
+	}
+	return unique
+}
+
+func varName(name string) string {
+	name = firstToLower(name)
+
+	// Adjust some reserved keywords, see https://golang.org/ref/spec#Keywords
+	switch name {
+	case "type":
+		name = "typ"
+	case "package":
+		name = "pkg"
+		// Not reserved, but syntax highlighters have it as a keyword.
+	case "len":
+		name = "length"
+	}
+
+	return name
+
+}
diff --git a/hugolib/page_resource.go b/codegen/methods2_test.go
similarity index 70%
rename from hugolib/page_resource.go
rename to codegen/methods2_test.go
index 201076e8b0b..bd36b5e80f5 100644
--- a/hugolib/page_resource.go
+++ b/codegen/methods2_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,13 +11,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package codegen
 
-import (
-	"github.com/gohugoio/hugo/resources/resource"
-)
-
-var (
-	_ resource.Resource = (*Page)(nil)
-	_ resource.Resource = (*PageOutput)(nil)
-)
+type IEmbed interface {
+	MethodEmbed3(s string) string
+	MethodEmbed1() string
+	MethodEmbed2()
+}
diff --git a/codegen/methods_test.go b/codegen/methods_test.go
new file mode 100644
index 00000000000..fad6da078ab
--- /dev/null
+++ b/codegen/methods_test.go
@@ -0,0 +1,100 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codegen
+
+import (
+	"fmt"
+	"net"
+	"os"
+	"reflect"
+	"testing"
+
+	"github.com/gohugoio/hugo/common/herrors"
+	"github.com/stretchr/testify/require"
+)
+
+func TestMethods(t *testing.T) {
+
+	var (
+		zeroIE     = reflect.TypeOf((*IEmbed)(nil)).Elem()
+		zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
+		zeroI      = reflect.TypeOf((*I)(nil)).Elem()
+	)
+
+	dir, _ := os.Getwd()
+	c := NewInspector(dir)
+
+	t.Run("MethodsFromTypes", func(t *testing.T) {
+		assert := require.New(t)
+
+		methods := c.MethodsFromTypes([]reflect.Type{zeroI}, nil)
+
+		methodsStr := fmt.Sprint(methods)
+
+		assert.Contains(methodsStr, "Method1(arg0 herrors.ErrorContext)")
+		assert.Contains(methodsStr, "Method7() interface {}")
+		assert.Contains(methodsStr, "Method0() string\n Method4() string")
+		assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string\n MethodEmbed1() string")
+
+		assert.Contains(methods.Imports(), "github.com/gohugoio/hugo/common/herrors")
+	})
+
+	t.Run("EmbedOnly", func(t *testing.T) {
+		assert := require.New(t)
+
+		methods := c.MethodsFromTypes([]reflect.Type{zeroIEOnly}, nil)
+
+		methodsStr := fmt.Sprint(methods)
+
+		assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string")
+
+	})
+
+	t.Run("ToMarshalJSON", func(t *testing.T) {
+		assert := require.New(t)
+
+		m, pkg := c.MethodsFromTypes(
+			[]reflect.Type{zeroI},
+			[]reflect.Type{zeroIE}).ToMarshalJSON("*page", "page")
+
+		assert.Contains(m, "method6 := p.Method6()")
+		assert.Contains(m, "Method0: method0,")
+		assert.Contains(m, "return json.Marshal(&s)")
+
+		assert.Contains(pkg, "github.com/gohugoio/hugo/common/herrors")
+		assert.Contains(pkg, "encoding/json")
+
+		fmt.Println(pkg)
+
+	})
+
+}
+
+type I interface {
+	IEmbed
+	Method0() string
+	Method4() string
+	Method1(myerr herrors.ErrorContext)
+	Method3(myint int, mystring string)
+	Method5() (string, error)
+	Method6() *net.IP
+	Method7() interface{}
+	Method8() herrors.ErrorContext
+	method2()
+	method9() os.FileInfo
+}
+
+type IEOnly interface {
+	IEmbed
+}
diff --git a/commands/commandeer.go b/commands/commandeer.go
index 8abb6418d7c..8c9da53b92b 100644
--- a/commands/commandeer.go
+++ b/commands/commandeer.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -357,6 +357,13 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 			c.changeDetector = changeDetector
 		}
 
+		if c.Cfg.GetBool("logPathWarnings") {
+			fs.Destination = hugofs.NewCreateCountingFs(fs.Destination)
+		}
+
+		// To debug hard-to-find path issues.
+		//fs.Destination = hugofs.NewStacktracerFs(fs.Destination, `fr/fr`)
+
 		err = c.initFs(fs)
 		if err != nil {
 			return
diff --git a/commands/commands.go b/commands/commands.go
index 38291fd958a..fa02b2e818b 100644
--- a/commands/commands.go
+++ b/commands/commands.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -23,7 +23,6 @@ import (
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/spf13/cobra"
-	"github.com/spf13/nitro"
 )
 
 type commandsBuilder struct {
@@ -197,6 +196,12 @@ type hugoBuilderCommon struct {
 
 	gc bool
 
+	// Profile flags (for debugging of performance problems)
+	cpuprofile   string
+	memprofile   string
+	mutexprofile string
+	traceprofile string
+
 	// TODO(bep) var vs string
 	logging    bool
 	verbose    bool
@@ -255,13 +260,22 @@ func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
 	cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
 	cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
 
-	cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program")
 	cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
 	cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
 	cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
 	cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
 	cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
 	cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations")
+	cmd.Flags().BoolP("path-warnings", "", false, "print warnings on duplicate target paths etc.")
+	cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
+	cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
+	cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
+	cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
+
+	// Hide these for now.
+	cmd.Flags().MarkHidden("profile-cpu")
+	cmd.Flags().MarkHidden("profile-mem")
+	cmd.Flags().MarkHidden("profile-mutex")
 
 	cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")
 
diff --git a/commands/commands_test.go b/commands/commands_test.go
index 2e8b99dc413..a1c6cdd7601 100644
--- a/commands/commands_test.go
+++ b/commands/commands_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ import (
 	"path/filepath"
 	"testing"
 
+	"github.com/gohugoio/hugo/common/types"
+
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
 
@@ -41,7 +43,7 @@ func TestExecute(t *testing.T) {
 	assert.NoError(resp.Err)
 	result := resp.Result
 	assert.True(len(result.Sites) == 1)
-	assert.True(len(result.Sites[0].RegularPages) == 1)
+	assert.True(len(result.Sites[0].RegularPages()) == 1)
 }
 
 func TestCommandsPersistentFlags(t *testing.T) {
@@ -75,6 +77,7 @@ func TestCommandsPersistentFlags(t *testing.T) {
 		"--port=1366",
 		"--renderToDisk",
 		"--source=mysource",
+		"--path-warnings",
 	}, func(commands []cmder) {
 		var sc *serverCmd
 		for _, command := range commands {
@@ -112,6 +115,9 @@ func TestCommandsPersistentFlags(t *testing.T) {
 
 		assert.True(cfg.GetBool("gc"))
 
+		// The flag is named path-warnings
+		assert.True(cfg.GetBool("logPathWarnings"))
+
 		// The flag is named i18n-warnings
 		assert.True(cfg.GetBool("logI18nWarnings"))
 
@@ -183,8 +189,8 @@ func TestCommandsExecute(t *testing.T) {
 	}
 
 	for _, test := range tests {
-
-		hugoCmd := newCommandsBuilder().addAll().build().getCommand()
+		b := newCommandsBuilder().addAll().build()
+		hugoCmd := b.getCommand()
 		test.flags = append(test.flags, "--quiet")
 		hugoCmd.SetArgs(append(test.commands, test.flags...))
 
@@ -200,6 +206,13 @@ func TestCommandsExecute(t *testing.T) {
 			assert.NoError(err, fmt.Sprintf("%v", test.commands))
 		}
 
+		// Assert that we have not left any development debug artifacts in
+		// the code.
+		if b.c != nil {
+			_, ok := b.c.destinationFs.(types.DevMarker)
+			assert.False(ok)
+		}
+
 	}
 
 }
diff --git a/commands/convert.go b/commands/convert.go
index c4f88a24537..e7ba572bc62 100644
--- a/commands/convert.go
+++ b/commands/convert.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/hugofs"
 
 	"github.com/gohugoio/hugo/helpers"
@@ -124,8 +126,8 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
 
 	site := h.Sites[0]
 
-	site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files")
-	for _, p := range site.AllPages {
+	site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files")
+	for _, p := range site.AllPages() {
 		if err := cc.convertAndSavePage(p, site, format); err != nil {
 			return err
 		}
@@ -133,24 +135,24 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
 	return nil
 }
 
-func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
+func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
 	// The resources are not in .Site.AllPages.
-	for _, r := range p.Resources.ByType("page") {
-		if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil {
+	for _, r := range p.Resources().ByType("page") {
+		if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
 			return err
 		}
 	}
 
-	if p.Filename() == "" {
+	if p.File() == nil {
 		// No content file.
 		return nil
 	}
 
 	errMsg := fmt.Errorf("Error processing file %q", p.Path())
 
-	site.Log.INFO.Println("Attempting to convert", p.LogicalName())
+	site.Log.INFO.Println("Attempting to convert", p.File().Filename())
 
-	f, _ := p.File.(src.ReadableFile)
+	f, _ := p.File().(src.ReadableFile)
 	file, err := f.Open()
 	if err != nil {
 		site.Log.ERROR.Println(errMsg)
@@ -186,7 +188,7 @@ func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, ta
 
 	newContent.Write(pf.content)
 
-	newFilename := p.Filename()
+	newFilename := p.File().Filename()
 
 	if cc.outputDir != "" {
 		contentDir := strings.TrimSuffix(newFilename, p.Path())
diff --git a/commands/hugo.go b/commands/hugo.go
index 3690c0ad519..648ba3af639 100644
--- a/commands/hugo.go
+++ b/commands/hugo.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,11 +18,16 @@ package commands
 import (
 	"fmt"
 	"io/ioutil"
-
 	"os/signal"
+	"runtime/pprof"
+	"runtime/trace"
 	"sort"
 	"sync/atomic"
 
+	"github.com/gohugoio/hugo/hugofs"
+
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/common/hugo"
 	"github.com/pkg/errors"
 
@@ -214,6 +219,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
 		"themesDir",
 		"verbose",
 		"verboseLog",
+		"duplicateTargetPaths",
 	}
 
 	// Will set a value even if it is the default.
@@ -235,6 +241,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
 	// Set some "config aliases"
 	setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
 	setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false)
+	setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false)
 
 }
 
@@ -290,6 +297,7 @@ func (c *commandeer) fullBuild() error {
 	}
 
 	copyStaticFunc := func() error {
+
 		cnt, err := c.copyStatic()
 		if err != nil {
 			if !os.IsNotExist(err) {
@@ -326,7 +334,7 @@ func (c *commandeer) fullBuild() error {
 	}
 
 	for _, s := range c.hugo.Sites {
-		s.ProcessingStats.Static = langCount[s.Language.Lang]
+		s.ProcessingStats.Static = langCount[s.Language().Lang]
 	}
 
 	if c.h.gc {
@@ -344,9 +352,125 @@ func (c *commandeer) fullBuild() error {
 
 }
 
+func (c *commandeer) initCPUProfile() (func(), error) {
+	if c.h.cpuprofile == "" {
+		return nil, nil
+	}
+
+	f, err := os.Create(c.h.cpuprofile)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to create CPU profile")
+	}
+	if err := pprof.StartCPUProfile(f); err != nil {
+		return nil, errors.Wrap(err, "failed to start CPU profile")
+	}
+	return func() {
+		pprof.StopCPUProfile()
+		f.Close()
+	}, nil
+}
+
+func (c *commandeer) initMemProfile() {
+	if c.h.memprofile == "" {
+		return
+	}
+
+	f, err := os.Create(c.h.memprofile)
+	if err != nil {
+		c.logger.ERROR.Println("could not create memory profile: ", err)
+	}
+	defer f.Close()
+	runtime.GC() // get up-to-date statistics
+	if err := pprof.WriteHeapProfile(f); err != nil {
+		c.logger.ERROR.Println("could not write memory profile: ", err)
+	}
+}
+
+func (c *commandeer) initTraceProfile() (func(), error) {
+	if c.h.traceprofile == "" {
+		return nil, nil
+	}
+
+	f, err := os.Create(c.h.traceprofile)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to create trace file")
+	}
+
+	if err := trace.Start(f); err != nil {
+		return nil, errors.Wrap(err, "failed to start trace")
+	}
+
+	return func() {
+		trace.Stop()
+		f.Close()
+	}, nil
+}
+
+func (c *commandeer) initMutexProfile() (func(), error) {
+	if c.h.mutexprofile == "" {
+		return nil, nil
+	}
+
+	f, err := os.Create(c.h.mutexprofile)
+	if err != nil {
+		return nil, err
+	}
+
+	runtime.SetMutexProfileFraction(1)
+
+	return func() {
+		pprof.Lookup("mutex").WriteTo(f, 0)
+		f.Close()
+	}, nil
+
+}
+
+func (c *commandeer) initProfiling() (func(), error) {
+	stopCPUProf, err := c.initCPUProfile()
+	if err != nil {
+		return nil, err
+	}
+
+	defer c.initMemProfile()
+
+	stopMutexProf, err := c.initMutexProfile()
+	if err != nil {
+		return nil, err
+	}
+
+	stopTraceProf, err := c.initTraceProfile()
+	if err != nil {
+		return nil, err
+	}
+
+	return func() {
+		if stopCPUProf != nil {
+			stopCPUProf()
+		}
+		if stopMutexProf != nil {
+			stopMutexProf()
+		}
+
+		if stopTraceProf != nil {
+			stopTraceProf()
+		}
+	}, nil
+}
+
 func (c *commandeer) build() error {
 	defer c.timeTrack(time.Now(), "Total")
 
+	stopProfiling, err := c.initProfiling()
+	if err != nil {
+		return err
+	}
+
+	defer func() {
+		if stopProfiling != nil {
+			stopProfiling()
+		}
+	}()
+
 	if err := c.fullBuild(); err != nil {
 		return err
 	}
@@ -356,6 +480,13 @@ func (c *commandeer) build() error {
 		fmt.Println()
 		c.hugo.PrintProcessingStats(os.Stdout)
 		fmt.Println()
+
+		if createCounter, ok := c.destinationFs.(hugofs.DuplicatesReporter); ok {
+			dupes := createCounter.ReportDuplicates()
+			if dupes != "" {
+				c.logger.WARN.Println("Duplicate target paths:", dupes)
+			}
+		}
 	}
 
 	if c.h.buildWatch {
@@ -369,7 +500,7 @@ func (c *commandeer) build() error {
 		checkErr(c.Logger, err)
 		defer watcher.Close()
 
-		var sigs = make(chan os.Signal)
+		var sigs = make(chan os.Signal, 1)
 		signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
 
 		<-sigs
@@ -381,6 +512,17 @@ func (c *commandeer) build() error {
 func (c *commandeer) serverBuild() error {
 	defer c.timeTrack(time.Now(), "Total")
 
+	stopProfiling, err := c.initProfiling()
+	if err != nil {
+		return err
+	}
+
+	defer func() {
+		if stopProfiling != nil {
+			stopProfiling()
+		}
+	}()
+
 	if err := c.fullBuild(); err != nil {
 		return err
 	}
@@ -474,11 +616,9 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6
 	}
 	c.logger.INFO.Println("syncing static files to", publishDir)
 
-	var err error
-
 	// because we are using a baseFs (to get the union right).
 	// set sync src to root
-	err = syncer.Sync(publishDir, helpers.FilePathSeparator)
+	err := syncer.Sync(publishDir, helpers.FilePathSeparator)
 	if err != nil {
 		return 0, err
 	}
@@ -619,13 +759,6 @@ func (c *commandeer) getDirList() ([]string, error) {
 	return a, nil
 }
 
-func (c *commandeer) resetAndBuildSites() (err error) {
-	if !c.h.quiet {
-		c.logger.FEEDBACK.Println("Started building sites ...")
-	}
-	return c.hugo.Build(hugolib.BuildCfg{ResetState: true})
-}
-
 func (c *commandeer) buildSites() (err error) {
 	return c.hugo.Build(hugolib.BuildCfg{})
 }
@@ -803,6 +936,13 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
 	// Special handling for symbolic links inside /content.
 	filtered := []fsnotify.Event{}
 	for _, ev := range evs {
+		// TODO(bep) page
+		if c.hugo == nil {
+			panic("no hugo")
+		}
+		if c.hugo.ContentChanges == nil {
+			panic("not cc")
+		}
 		// Check the most specific first, i.e. files.
 		contentMapped := c.hugo.ContentChanges.GetSymbolicLinkMappings(ev.Name)
 		if len(contentMapped) > 0 {
@@ -973,7 +1113,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
 				navigate := c.Cfg.GetBool("navigateToChanged")
 				// We have fetched the same page above, but it may have
 				// changed.
-				var p *hugolib.Page
+				var p page.Page
 
 				if navigate {
 					if onePageName != "" {
@@ -982,7 +1122,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
 				}
 
 				if p != nil {
-					livereload.NavigateToPathForPort(p.RelPermalink(), p.Site.ServerPort())
+					livereload.NavigateToPathForPort(p.RelPermalink(), p.Site().ServerPort())
 				} else {
 					livereload.ForceRefresh()
 				}
@@ -1044,9 +1184,11 @@ func (c *commandeer) isThemeVsHugoVersionMismatch(fs afero.Fs) (dir string, mism
 		}
 
 		b, err := afero.ReadFile(fs, path)
+		if err != nil {
+			continue
+		}
 
 		tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML)
-
 		if err != nil {
 			continue
 		}
diff --git a/commands/import_jekyll.go b/commands/import_jekyll.go
index d3301b48f2f..1d37cfd9d3a 100644
--- a/commands/import_jekyll.go
+++ b/commands/import_jekyll.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -340,7 +340,7 @@ func copyDir(source string, dest string) error {
 	if err != nil {
 		return err
 	}
-	entries, err := ioutil.ReadDir(source)
+	entries, _ := ioutil.ReadDir(source)
 	for _, entry := range entries {
 		sfp := filepath.Join(source, entry.Name())
 		dfp := filepath.Join(dest, entry.Name())
@@ -373,6 +373,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
 		return err
 	}
 	entries, err := ioutil.ReadDir(jekyllRoot)
+	if err != nil {
+		return err
+	}
+
 	for _, entry := range entries {
 		sfp := filepath.Join(jekyllRoot, entry.Name())
 		dfp := filepath.Join(dest, entry.Name())
@@ -464,7 +468,7 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b
 
 	fs := hugofs.Os
 	if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
-		return fmt.Errorf("Failed to save file %q:", filename)
+		return fmt.Errorf("failed to save file %q: %s", filename, err)
 	}
 
 	return nil
diff --git a/commands/list.go b/commands/list.go
index f49726b62fb..99e9afe400a 100644
--- a/commands/list.go
+++ b/commands/list.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,6 +20,7 @@ import (
 	"time"
 
 	"github.com/gohugoio/hugo/hugolib"
+	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/cobra"
 	jww "github.com/spf13/jwalterweatherman"
 )
@@ -70,7 +71,7 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
 
 				for _, p := range sites.Pages() {
 					if p.IsDraft() {
-						jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName()))
+						jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName()))
 					}
 
 				}
@@ -108,8 +109,8 @@ posted in the future.`,
 				defer writer.Flush()
 
 				for _, p := range sites.Pages() {
-					if p.IsFuture() {
-						err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.PublishDate.Format(time.RFC3339)})
+					if resource.IsFuture(p) {
+						err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.PublishDate().Format(time.RFC3339)})
 						if err != nil {
 							return newSystemError("Error writing future posts to stdout", err)
 						}
@@ -149,11 +150,12 @@ expired.`,
 				defer writer.Flush()
 
 				for _, p := range sites.Pages() {
-					if p.IsExpired() {
-						err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.ExpiryDate.Format(time.RFC3339)})
+					if resource.IsExpired(p) {
+						err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.ExpiryDate().Format(time.RFC3339)})
 						if err != nil {
 							return newSystemError("Error writing expired posts to stdout", err)
 						}
+
 					}
 				}
 
diff --git a/commands/new_content_test.go b/commands/new_content_test.go
index fb8bca7b4cc..5a55094d69b 100644
--- a/commands/new_content_test.go
+++ b/commands/new_content_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -62,7 +62,7 @@ func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
 	_, fs := newTestCfg()
 	n := newNewSiteCmd()
 
-	require.NoError(t, fs.Source.MkdirAll(basepath, 777))
+	require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
 
 	require.NoError(t, n.doNewSite(fs, basepath, false))
 }
@@ -72,7 +72,7 @@ func TestDoNewSite_error_base_exists(t *testing.T) {
 	_, fs := newTestCfg()
 	n := newNewSiteCmd()
 
-	require.NoError(t, fs.Source.MkdirAll(basepath, 777))
+	require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
 	_, err := fs.Source.Create(filepath.Join(basepath, "foo"))
 	require.NoError(t, err)
 	// Since the directory already exists and isn't empty, expect an error
@@ -85,7 +85,7 @@ func TestDoNewSite_force_empty_dir(t *testing.T) {
 	_, fs := newTestCfg()
 	n := newNewSiteCmd()
 
-	require.NoError(t, fs.Source.MkdirAll(basepath, 777))
+	require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
 
 	require.NoError(t, n.doNewSite(fs, basepath, true))
 
@@ -99,7 +99,7 @@ func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
 
 	contentPath := filepath.Join(basepath, "content")
 
-	require.NoError(t, fs.Source.MkdirAll(contentPath, 777))
+	require.NoError(t, fs.Source.MkdirAll(contentPath, 0777))
 	require.Error(t, n.doNewSite(fs, basepath, true))
 }
 
@@ -109,7 +109,7 @@ func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
 	n := newNewSiteCmd()
 
 	configPath := filepath.Join(basepath, "config.toml")
-	require.NoError(t, fs.Source.MkdirAll(basepath, 777))
+	require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
 	_, err := fs.Source.Create(configPath)
 	require.NoError(t, err)
 
diff --git a/commands/server.go b/commands/server.go
index c2bd76dae54..5d50ebe2cf0 100644
--- a/commands/server.go
+++ b/commands/server.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -358,7 +358,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
 						if err := f.c.partialReRender(p); err != nil {
 							f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", p))
 							if f.c.showErrorInBrowser {
-								http.Redirect(w, r, p, 301)
+								http.Redirect(w, r, p, http.StatusMovedPermanently)
 								return
 							}
 						}
@@ -386,7 +386,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
 	return mu, u.String(), endpoint, nil
 }
 
-var logErrorRe = regexp.MustCompile("(?s)ERROR \\d{4}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2} ")
+var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} `)
 
 func removeErrorPrefixFromLog(content string) string {
 	return logErrorRe.ReplaceAllLiteralString(content, "")
@@ -403,7 +403,7 @@ func (c *commandeer) serve(s *serverCmd) error {
 	if isMultiHost {
 		for _, s := range c.hugo.Sites {
 			baseURLs = append(baseURLs, s.BaseURL.String())
-			roots = append(roots, s.Language.Lang)
+			roots = append(roots, s.Language().Lang)
 		}
 	} else {
 		s := c.hugo.Sites[0]
@@ -430,7 +430,7 @@ func (c *commandeer) serve(s *serverCmd) error {
 		livereload.Initialize()
 	}
 
-	var sigs = make(chan os.Signal)
+	var sigs = make(chan os.Signal, 1)
 	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
 
 	for i := range baseURLs {
diff --git a/common/collections/append.go b/common/collections/append.go
index b9a9419cb62..ee15fef7da6 100644
--- a/common/collections/append.go
+++ b/common/collections/append.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -92,9 +92,7 @@ func appendToInterfaceSlice(tov reflect.Value, from ...interface{}) ([]interface
 		tos = append(tos, tov.Index(i).Interface())
 	}
 
-	for _, v := range from {
-		tos = append(tos, v)
-	}
+	tos = append(tos, from...)
 
 	return tos, nil
 }
diff --git a/common/collections/slice_test.go b/common/collections/slice_test.go
index 1103e2feaa2..fd8eb24f1bd 100644
--- a/common/collections/slice_test.go
+++ b/common/collections/slice_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -75,11 +75,11 @@ func (p *tstSlicerIn2) Slice(in interface{}) (interface{}, error) {
 }
 
 func (p *tstSlicerIn1) Name() string {
-	return p.Name()
+	return p.name
 }
 
 func (p *tstSlicerIn2) Name() string {
-	return p.Name()
+	return p.name
 }
 
 func (p *tstSlicer) Slice(in interface{}) (interface{}, error) {
diff --git a/common/hugio/readers.go b/common/hugio/readers.go
index ba55e2d08da..8c901dd2409 100644
--- a/common/hugio/readers.go
+++ b/common/hugio/readers.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -32,6 +32,7 @@ type ReadSeekCloser interface {
 }
 
 // ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close.
+// TODO(bep) rename this and similar to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
 type ReadSeekerNoOpCloser struct {
 	ReadSeeker
 }
diff --git a/common/maps/scratch.go b/common/maps/scratch.go
index 2972e202200..4acd10c6cb2 100644
--- a/common/maps/scratch.go
+++ b/common/maps/scratch.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -28,6 +28,24 @@ type Scratch struct {
 	mu     sync.RWMutex
 }
 
+// Scratcher provides a scratching service.
+type Scratcher interface {
+	Scratch() *Scratch
+}
+
+type scratcher struct {
+	s *Scratch
+}
+
+func (s scratcher) Scratch() *Scratch {
+	return s.s
+}
+
+// NewScratcher creates a new Scratcher.
+func NewScratcher() Scratcher {
+	return scratcher{s: NewScratch()}
+}
+
 // Add will, for single values, add (using the + operator) the addend to the existing addend (if found).
 // Supports numeric values and strings.
 //
diff --git a/common/types/types.go b/common/types/types.go
index 95e72d99b6c..f03031439e6 100644
--- a/common/types/types.go
+++ b/common/types/types.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -16,6 +16,7 @@ package types
 
 import (
 	"fmt"
+	"reflect"
 
 	"github.com/spf13/cast"
 )
@@ -56,3 +57,24 @@ func NewKeyValuesStrings(key string, values ...string) KeyValues {
 type Zeroer interface {
 	IsZero() bool
 }
+
+// IsNil reports whether v is nil.
+func IsNil(v interface{}) bool {
+	if v == nil {
+		return true
+	}
+
+	value := reflect.ValueOf(v)
+	switch value.Kind() {
+	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+		return value.IsNil()
+	}
+
+	return false
+}
+
+// DevMarker is a marker interface for types that should only be used during
+// development.
+type DevMarker interface {
+	DevOnly()
+}
diff --git a/config/configProvider.go b/config/configProvider.go
index bc0dd950d7a..31914c38bc5 100644
--- a/config/configProvider.go
+++ b/config/configProvider.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -40,3 +40,15 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string {
 	}
 	return cast.ToStringSlice(sd)
 }
+
+// SetBaseTestDefaults provides some common config defaults used in tests.
+func SetBaseTestDefaults(cfg Provider) {
+	cfg.Set("resourceDir", "resources")
+	cfg.Set("contentDir", "content")
+	cfg.Set("dataDir", "data")
+	cfg.Set("i18nDir", "i18n")
+	cfg.Set("layoutDir", "layouts")
+	cfg.Set("assetDir", "assets")
+	cfg.Set("archetypeDir", "archetypes")
+	cfg.Set("publishDir", "public")
+}
diff --git a/config/services/servicesConfig.go b/config/services/servicesConfig.go
index 7306f527483..559848f5c02 100644
--- a/config/services/servicesConfig.go
+++ b/config/services/servicesConfig.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ const (
 
 	disqusShortnameKey = "disqusshortname"
 	googleAnalyticsKey = "googleanalytics"
+	rssLimitKey        = "rssLimit"
 )
 
 // Config is a privacy configuration for all the relevant services in Hugo.
@@ -31,6 +32,7 @@ type Config struct {
 	GoogleAnalytics GoogleAnalytics
 	Instagram       Instagram
 	Twitter         Twitter
+	RSS             RSS
 }
 
 // Disqus holds the functional configuration settings related to the Disqus template.
@@ -61,6 +63,12 @@ type Twitter struct {
 	DisableInlineCSS bool
 }
 
+// RSS holds the functional configuration settings related to the RSS feeds.
+type RSS struct {
+	// Limit the number of pages.
+	Limit int
+}
+
 // DecodeConfig creates a services Config from a given Hugo configuration.
 func DecodeConfig(cfg config.Provider) (c Config, err error) {
 	m := cfg.GetStringMap(servicesConfigKey)
@@ -76,5 +84,9 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) {
 		c.Disqus.Shortname = cfg.GetString(disqusShortnameKey)
 	}
 
+	if c.RSS.Limit == 0 {
+		c.RSS.Limit = cfg.GetInt(rssLimitKey)
+	}
+
 	return
 }
diff --git a/hugolib/sitemap.go b/config/sitemap.go
similarity index 73%
rename from hugolib/sitemap.go
rename to config/sitemap.go
index 64d6f5b7a75..4031b7ec115 100644
--- a/hugolib/sitemap.go
+++ b/config/sitemap.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package config
 
 import (
 	"github.com/spf13/cast"
@@ -25,21 +25,20 @@ type Sitemap struct {
 	Filename   string
 }
 
-func parseSitemap(input map[string]interface{}) Sitemap {
-	sitemap := Sitemap{Priority: -1, Filename: "sitemap.xml"}
+func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap {
 
 	for key, value := range input {
 		switch key {
 		case "changefreq":
-			sitemap.ChangeFreq = cast.ToString(value)
+			prototype.ChangeFreq = cast.ToString(value)
 		case "priority":
-			sitemap.Priority = cast.ToFloat64(value)
+			prototype.Priority = cast.ToFloat64(value)
 		case "filename":
-			sitemap.Filename = cast.ToString(value)
+			prototype.Filename = cast.ToString(value)
 		default:
 			jww.WARN.Printf("Unknown Sitemap field: %s\n", key)
 		}
 	}
 
-	return sitemap
+	return prototype
 }
diff --git a/create/content.go b/create/content.go
index 31b7b2e4d70..264a0f3aca3 100644
--- a/create/content.go
+++ b/create/content.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -50,7 +50,7 @@ func NewContent(
 
 	if isDir {
 
-		langFs := hugofs.NewLanguageFs(s.Language.Lang, sites.LanguageSet(), archetypeFs)
+		langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs)
 
 		cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename)
 		if err != nil {
@@ -113,7 +113,7 @@ func NewContent(
 
 func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site {
 	for _, s := range sites.Sites {
-		if fi.Lang() == s.Language.Lang {
+		if fi.Lang() == s.Language().Lang {
 			return s
 		}
 	}
@@ -245,7 +245,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
 
 	// Try the filename: my-post.en.md
 	for _, ss := range sites.Sites {
-		if strings.Contains(targetPath, "."+ss.Language.Lang+".") {
+		if strings.Contains(targetPath, "."+ss.Language().Lang+".") {
 			s = ss
 			break
 		}
diff --git a/deps/deps.go b/deps/deps.go
index 628019961bc..47159d017c2 100644
--- a/deps/deps.go
+++ b/deps/deps.go
@@ -7,13 +7,14 @@ import (
 	"github.com/pkg/errors"
 
 	"github.com/gohugoio/hugo/cache/filecache"
-	"github.com/gohugoio/hugo/common/hugo"
 	"github.com/gohugoio/hugo/common/loggers"
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/hugofs"
 	"github.com/gohugoio/hugo/langs"
 	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/metrics"
 	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/resources"
@@ -67,7 +68,7 @@ type Deps struct {
 	Language *langs.Language
 
 	// The site building.
-	Site hugo.Site
+	Site page.Site
 
 	// All the output formats available for the current site.
 	OutputFormatsConfig output.Formats
@@ -325,7 +326,7 @@ type DepsCfg struct {
 	Language *langs.Language
 
 	// The Site in use
-	Site hugo.Site
+	Site page.Site
 
 	// The configuration to use.
 	Cfg config.Provider
diff --git a/docs/content/en/variables/page.md b/docs/content/en/variables/page.md
index 9dcbdcc435e..c4ddc820040 100644
--- a/docs/content/en/variables/page.md
+++ b/docs/content/en/variables/page.md
@@ -79,8 +79,7 @@ See [`.Scratch`](/functions/scratch/) for page-scoped, writable variables.
 : the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections.
 
 .Language
-: a language object that points to the language's definition in the site
-`config`.
+: a language object that points to the language's definition in the site `config`. `.Language.Lang` gives you the language code.
 
 .Lastmod
 : the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter.
@@ -93,10 +92,7 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
 .LinkTitle
 : access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`.
 
-.Next (deprecated)
-: In older Hugo versions this pointer went the wrong direction. Please use `.PrevPage` instead.
-
-.NextPage
+.Next
 : Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`.
 
 .NextInSection
@@ -119,9 +115,6 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
 : the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice.
 
 .Prev (deprecated)
-: In older Hugo versions this pointer went the wrong direction. Please use `.NextPage` instead.
-
-.PrevPage
 : Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`.
 
 .PrevInSection
@@ -130,8 +123,8 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
 .PublishDate
 : the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`.
 
-.RSSLink
-: link to the taxonomies' RSS link.
+.RSSLink (deprecated)
+: link to the page's RSS feed. This is deprecated. You should instead do something like this: `{{ with .OutputFormats.Get "RSS" }}{{ .RelPermalink }}{{ end }}`.
 
 .RawContent
 : raw markdown content without the front matter. Useful with [remarkjs.com](
diff --git a/go.mod b/go.mod
index 63228a46d8f..44520ca6130 100644
--- a/go.mod
+++ b/go.mod
@@ -44,7 +44,6 @@ require (
 	github.com/spf13/cobra v0.0.3
 	github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05
 	github.com/spf13/jwalterweatherman v1.1.0
-	github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d
 	github.com/spf13/pflag v1.0.3
 	github.com/spf13/viper v1.3.2
 	github.com/stretchr/testify v1.3.0
diff --git a/go.sum b/go.sum
index b0646e88cfb..b68f6306858 100644
--- a/go.sum
+++ b/go.sum
@@ -126,8 +126,6 @@ github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
 github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
 github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
-github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d h1:ihvj2nmx8eqWjlgNgdW6h0DyGJuq5GiwHadJkG0wXtQ=
-github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d/go.mod h1:jU8A+8xL+6n1OX4XaZtCj4B3mIa64tULUsD6YegdpFo=
 github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
diff --git a/helpers/content.go b/helpers/content.go
index 644942cb144..bc19f655947 100644
--- a/helpers/content.go
+++ b/helpers/content.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -57,7 +57,7 @@ type ContentSpec struct {
 	Highlight            func(code, lang, optsStr string) (string, error)
 	defatultPygmentsOpts map[string]string
 
-	cfg config.Provider
+	Cfg config.Provider
 }
 
 // NewContentSpec returns a ContentSpec initialized
@@ -73,7 +73,7 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) {
 		BuildExpired:               cfg.GetBool("buildExpired"),
 		BuildDrafts:                cfg.GetBool("buildDrafts"),
 
-		cfg: cfg,
+		Cfg: cfg,
 	}
 
 	// Highlighting setup
@@ -382,7 +382,7 @@ func (c *ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingConte
 	return &HugoMmarkHTMLRenderer{
 		cs:       c,
 		Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
-		Cfg:      c.cfg,
+		Cfg:      c.Cfg,
 	}
 }
 
diff --git a/helpers/content_renderer_test.go b/helpers/content_renderer_test.go
index a01014b4eb3..f542d5d5400 100644
--- a/helpers/content_renderer_test.go
+++ b/helpers/content_renderer_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@ import (
 
 // Renders a codeblock using Blackfriday
 func (c ContentSpec) render(input string) string {
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	render := c.getHTMLRenderer(0, ctx)
 
 	buf := &bytes.Buffer{}
@@ -34,7 +34,7 @@ func (c ContentSpec) render(input string) string {
 
 // Renders a codeblock using Mmark
 func (c ContentSpec) renderWithMmark(input string) string {
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	render := c.getMmarkHTMLRenderer(0, ctx)
 
 	buf := &bytes.Buffer{}
diff --git a/helpers/content_test.go b/helpers/content_test.go
index 5297df2de2a..1dd4a2fb83b 100644
--- a/helpers/content_test.go
+++ b/helpers/content_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -181,7 +181,7 @@ func TestTruncateWordsByRune(t *testing.T) {
 
 func TestGetHTMLRendererFlags(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx)
 	flags := renderer.GetFlags()
 	if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML {
@@ -210,7 +210,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
 		{blackfriday.HTML_SMARTYPANTS_LATEX_DASHES},
 	}
 	defaultFlags := blackfriday.HTML_USE_XHTML
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Config.AngledQuotes = true
 	ctx.Config.Fractions = true
 	ctx.Config.HrefTargetBlank = true
@@ -235,7 +235,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
 
 func TestGetHTMLRendererAnchors(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.DocumentID = "testid"
 	ctx.Config.PlainIDAnchors = false
 
@@ -259,7 +259,7 @@ func TestGetHTMLRendererAnchors(t *testing.T) {
 
 func TestGetMmarkHTMLRenderer(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.DocumentID = "testid"
 	ctx.Config.PlainIDAnchors = false
 	actualRenderer := c.getMmarkHTMLRenderer(0, ctx)
@@ -283,7 +283,7 @@ func TestGetMmarkHTMLRenderer(t *testing.T) {
 
 func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Config.Extensions = []string{"headerId"}
 	ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"}
 
@@ -298,7 +298,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
 		testFlag int
 	}
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Config.Extensions = []string{""}
 	ctx.Config.ExtensionsMask = []string{""}
 	allExtensions := []data{
@@ -330,7 +330,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
 
 func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Config.Extensions = []string{"definitionLists"}
 	ctx.Config.ExtensionsMask = []string{""}
 
@@ -342,7 +342,7 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
 
 func TestGetMarkdownRenderer(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Content = []byte("testContent")
 	actualRenderedMarkdown := c.markdownRender(ctx)
 	expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
@@ -353,7 +353,7 @@ func TestGetMarkdownRenderer(t *testing.T) {
 
 func TestGetMarkdownRendererWithTOC(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{RenderTOC: true, Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Content = []byte("testContent")
 	actualRenderedMarkdown := c.markdownRender(ctx)
 	expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n")
@@ -368,7 +368,7 @@ func TestGetMmarkExtensions(t *testing.T) {
 		testFlag int
 	}
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Config.Extensions = []string{"tables"}
 	ctx.Config.ExtensionsMask = []string{""}
 	allExtensions := []data{
@@ -397,7 +397,7 @@ func TestGetMmarkExtensions(t *testing.T) {
 
 func TestMmarkRender(t *testing.T) {
 	c := newTestContentSpec()
-	ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
+	ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
 	ctx.Content = []byte("testContent")
 	actualRenderedMarkdown := c.mmarkRender(ctx)
 	expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
diff --git a/helpers/general.go b/helpers/general.go
index 00caf1ecc91..962b35bc6c4 100644
--- a/helpers/general.go
+++ b/helpers/general.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -92,7 +92,7 @@ func GuessType(in string) string {
 		return "org"
 	}
 
-	return "unknown"
+	return ""
 }
 
 // FirstUpper returns a string with the first character as upper case.
@@ -325,12 +325,15 @@ func InitLoggers() {
 // The idea is two remove an item in two Hugo releases to give users and theme authors
 // plenty of time to fix their templates.
 func Deprecated(object, item, alternative string, err bool) {
+	if !strings.HasSuffix(alternative, ".") {
+		alternative += "."
+	}
+
 	if err {
 		DistinctErrorLog.Printf("%s's %s is deprecated and will be removed in Hugo %s. %s", object, item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
 
 	} else {
-		// Make sure the users see this while avoiding build breakage. This will not lead to an os.Exit(-1)
-		DistinctFeedbackLog.Printf("WARNING: %s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
+		DistinctWarnLog.Printf("%s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
 	}
 }
 
diff --git a/helpers/general_test.go b/helpers/general_test.go
index 1279df43948..ed4c3d2c27b 100644
--- a/helpers/general_test.go
+++ b/helpers/general_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@ func TestGuessType(t *testing.T) {
 		{"html", "html"},
 		{"htm", "html"},
 		{"org", "org"},
-		{"excel", "unknown"},
+		{"excel", ""},
 	} {
 		result := GuessType(this.in)
 		if result != this.expect {
@@ -166,6 +166,27 @@ var containsAdditionalTestData = []struct {
 	{"", []byte(""), false},
 }
 
+func TestSliceToLower(t *testing.T) {
+	t.Parallel()
+	tests := []struct {
+		value    []string
+		expected []string
+	}{
+		{[]string{"a", "b", "c"}, []string{"a", "b", "c"}},
+		{[]string{"a", "B", "c"}, []string{"a", "b", "c"}},
+		{[]string{"A", "B", "C"}, []string{"a", "b", "c"}},
+	}
+
+	for _, test := range tests {
+		res := SliceToLower(test.value)
+		for i, val := range res {
+			if val != test.expected[i] {
+				t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
+			}
+		}
+	}
+}
+
 func TestReaderContains(t *testing.T) {
 	for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
 		result := ReaderContains(strings.NewReader(this.v1), this.v2)
diff --git a/helpers/path.go b/helpers/path.go
index bf7e3bf9903..de2c9b0a04f 100644
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -86,6 +86,13 @@ func (p *PathSpec) MakePath(s string) string {
 	return p.UnicodeSanitize(s)
 }
 
+// MakePathsSanitized applies MakePathSanitized on every item in the slice
+func (p *PathSpec) MakePathsSanitized(paths []string) {
+	for i, path := range paths {
+		paths[i] = p.MakePathSanitized(path)
+	}
+}
+
 // MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
 func (p *PathSpec) MakePathSanitized(s string) string {
 	if p.DisablePathToLower {
diff --git a/helpers/pygments.go b/helpers/pygments.go
index 4a90e353ded..64c5b3ea8ed 100644
--- a/helpers/pygments.go
+++ b/helpers/pygments.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -56,7 +56,7 @@ type highlighters struct {
 }
 
 func newHiglighters(cs *ContentSpec) highlighters {
-	return highlighters{cs: cs, ignoreCache: cs.cfg.GetBool("ignoreCache"), cacheDir: cs.cfg.GetString("cacheDir")}
+	return highlighters{cs: cs, ignoreCache: cs.Cfg.GetBool("ignoreCache"), cacheDir: cs.Cfg.GetString("cacheDir")}
 }
 
 func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) {
diff --git a/htesting/test_structs.go b/htesting/test_structs.go
index f5aa6ff2513..72dc7f3fc63 100644
--- a/htesting/test_structs.go
+++ b/htesting/test_structs.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,8 +14,13 @@
 package htesting
 
 import (
+	"html/template"
+	"time"
+
 	"github.com/gohugoio/hugo/common/hugo"
 	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/navigation"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/spf13/viper"
 )
 
@@ -28,6 +33,22 @@ func (t testSite) Hugo() hugo.Info {
 	return t.h
 }
 
+func (t testSite) ServerPort() int {
+	return 1313
+}
+
+func (testSite) LastChange() (t time.Time) {
+	return
+}
+
+func (t testSite) Title() string {
+	return "foo"
+}
+
+func (t testSite) Sites() page.Sites {
+	return nil
+}
+
 func (t testSite) IsServer() bool {
 	return false
 }
@@ -36,8 +57,36 @@ func (t testSite) Language() *langs.Language {
 	return t.l
 }
 
+func (t testSite) Pages() page.Pages {
+	return nil
+}
+
+func (t testSite) RegularPages() page.Pages {
+	return nil
+}
+
+func (t testSite) Menus() navigation.Menus {
+	return nil
+}
+
+func (t testSite) Taxonomies() interface{} {
+	return nil
+}
+
+func (t testSite) BaseURL() template.URL {
+	return ""
+}
+
+func (t testSite) Params() map[string]interface{} {
+	return nil
+}
+
+func (t testSite) Data() map[string]interface{} {
+	return nil
+}
+
 // NewTestHugoSite creates a new minimal test site.
-func NewTestHugoSite() hugo.Site {
+func NewTestHugoSite() page.Site {
 	return testSite{
 		h: hugo.NewInfo(hugo.EnvironmentProduction),
 		l: langs.NewLanguage("en", newTestConfig()),
diff --git a/hugofs/createcounting_fs.go b/hugofs/createcounting_fs.go
new file mode 100644
index 00000000000..802806b7af1
--- /dev/null
+++ b/hugofs/createcounting_fs.go
@@ -0,0 +1,99 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+	"fmt"
+	"os"
+	"sort"
+	"strings"
+	"sync"
+
+	"github.com/spf13/afero"
+)
+
+// Reseter is implemented by some of the stateful filesystems.
+type Reseter interface {
+	Reset()
+}
+
+// DuplicatesReporter reports about duplicate filenames.
+type DuplicatesReporter interface {
+	ReportDuplicates() string
+}
+
+func NewCreateCountingFs(fs afero.Fs) afero.Fs {
+	return &createCountingFs{Fs: fs, fileCount: make(map[string]int)}
+}
+
+// ReportDuplicates reports filenames written more than once.
+func (c *createCountingFs) ReportDuplicates() string {
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	var dupes []string
+
+	for k, v := range c.fileCount {
+		if v > 1 {
+			dupes = append(dupes, fmt.Sprintf("%s (%d)", k, v))
+		}
+	}
+
+	if len(dupes) == 0 {
+		return ""
+	}
+
+	sort.Strings(dupes)
+
+	return strings.Join(dupes, ", ")
+}
+
+// createCountingFs counts filenames of created files or files opened
+// for writing.
+type createCountingFs struct {
+	afero.Fs
+
+	mu        sync.Mutex
+	fileCount map[string]int
+}
+
+func (c *createCountingFs) Reset() {
+	c.mu.Lock()
+	defer c.mu.Unlock()
+
+	c.fileCount = make(map[string]int)
+}
+
+func (fs *createCountingFs) onCreate(filename string) {
+	fs.mu.Lock()
+	defer fs.mu.Unlock()
+
+	fs.fileCount[filename] = fs.fileCount[filename] + 1
+}
+
+func (fs *createCountingFs) Create(name string) (afero.File, error) {
+	f, err := fs.Fs.Create(name)
+	if err == nil {
+		fs.onCreate(name)
+	}
+	return f, err
+}
+
+func (fs *createCountingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+	f, err := fs.Fs.OpenFile(name, flag, perm)
+	if err == nil && isWrite(flag) {
+		fs.onCreate(name)
+	}
+	return f, err
+}
diff --git a/hugofs/fs.go b/hugofs/fs.go
index 52e27bd12f4..38590a64e0a 100644
--- a/hugofs/fs.go
+++ b/hugofs/fs.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,6 +15,8 @@
 package hugofs
 
 import (
+	"os"
+
 	"github.com/gohugoio/hugo/config"
 	"github.com/spf13/afero"
 )
@@ -80,3 +82,7 @@ func getWorkingDirFs(base afero.Fs, cfg config.Provider) *afero.BasePathFs {
 
 	return nil
 }
+
+func isWrite(flag int) bool {
+	return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
+}
diff --git a/hugofs/hashing_fs.go b/hugofs/hashing_fs.go
index 2de027ce20e..94a50b96077 100644
--- a/hugofs/hashing_fs.go
+++ b/hugofs/hashing_fs.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -67,10 +67,6 @@ func (fs *md5HashingFs) wrapFile(f afero.File) afero.File {
 	return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver}
 }
 
-func isWrite(flag int) bool {
-	return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
-}
-
 func (fs *md5HashingFs) Name() string {
 	return "md5HashingFs"
 }
diff --git a/hugofs/stacktracer_fs.go b/hugofs/stacktracer_fs.go
new file mode 100644
index 00000000000..d4db164caf8
--- /dev/null
+++ b/hugofs/stacktracer_fs.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+	"fmt"
+	"os"
+	"regexp"
+	"runtime"
+
+	"github.com/gohugoio/hugo/common/types"
+
+	"github.com/spf13/afero"
+)
+
+// Make sure we don't accidentally use this in the real Hugo.
+var _ types.DevMarker = (*stacktracerFs)(nil)
+
+// NewStacktracerFs wraps the given fs printing stack traces for file creates
+// matching the given regexp pattern.
+func NewStacktracerFs(fs afero.Fs, pattern string) afero.Fs {
+	return &stacktracerFs{Fs: fs, re: regexp.MustCompile(pattern)}
+}
+
+// stacktracerFs can be used in hard-to-debug development situations where
+// you get some input you don't understand where comes from.
+type stacktracerFs struct {
+	afero.Fs
+
+	// Will print stacktrace for every file creates matching this pattern.
+	re *regexp.Regexp
+}
+
+func (fs *stacktracerFs) DevOnly() {
+}
+
+func (fs *stacktracerFs) onCreate(filename string) {
+	if fs.re.MatchString(filename) {
+		trace := make([]byte, 1500)
+		runtime.Stack(trace, true)
+		fmt.Printf("\n===========\n%q:\n%s\n", filename, trace)
+	}
+}
+
+func (fs *stacktracerFs) Create(name string) (afero.File, error) {
+	f, err := fs.Fs.Create(name)
+	if err == nil {
+		fs.onCreate(name)
+	}
+	return f, err
+}
+
+func (fs *stacktracerFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+	f, err := fs.Fs.OpenFile(name, flag, perm)
+	if err == nil && isWrite(flag) {
+		fs.onCreate(name)
+	}
+	return f, err
+}
diff --git a/hugolib/alias.go b/hugolib/alias.go
index c44f32dbba1..599821c0ac5 100644
--- a/hugolib/alias.go
+++ b/hugolib/alias.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@ import (
 
 	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/publisher"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/tpl"
 
 	"github.com/gohugoio/hugo/helpers"
@@ -55,7 +56,12 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al
 	return aliasHandler{t, l, allowRoot}
 }
 
-func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) {
+type aliasPage struct {
+	Permalink string
+	page.Page
+}
+
+func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) {
 	t := "alias"
 	if isXHTML {
 		t = "alias-xhtml"
@@ -75,12 +81,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
 		}
 
 	}
-	data := struct {
-		Permalink string
-		Page      *Page
-	}{
+	data := aliasPage{
 		permalink,
-		page,
+		p,
 	}
 
 	buffer := new(bytes.Buffer)
@@ -91,11 +94,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
 	return buffer, nil
 }
 
-func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) {
+func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) {
 	return s.publishDestAlias(false, path, permalink, outputFormat, p)
 }
 
-func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) {
+func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) {
 	handler := newAliasHandler(s.Tmpl, s.Log, allowRoot)
 
 	isXHTML := strings.HasSuffix(path, ".xhtml")
@@ -126,19 +129,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo
 func (a aliasHandler) targetPathAlias(src string) (string, error) {
 	originalAlias := src
 	if len(src) <= 0 {
-		return "", fmt.Errorf("Alias \"\" is an empty string")
+		return "", fmt.Errorf("alias \"\" is an empty string")
 	}
 
 	alias := filepath.Clean(src)
 	components := strings.Split(alias, helpers.FilePathSeparator)
 
 	if !a.allowRoot && alias == helpers.FilePathSeparator {
-		return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias)
+		return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias)
 	}
 
 	// Validate against directory traversal
 	if components[0] == ".." {
-		return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias)
+		return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias)
 	}
 
 	// Handle Windows file and directory naming restrictions
@@ -171,7 +174,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) {
 			for _, m := range msgs {
 				a.log.ERROR.Println(m)
 			}
-			return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias)
+			return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias)
 		}
 		for _, m := range msgs {
 			a.log.INFO.Println(m)
diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go
index da1b80b7007..684e35c9a2c 100644
--- a/hugolib/alias_test.go
+++ b/hugolib/alias_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -50,7 +50,7 @@ func TestAlias(t *testing.T) {
 	b.CreateSites().Build(BuildCfg{})
 
 	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 1)
+	require.Len(t, b.H.Sites[0].RegularPages(), 1)
 
 	// the real page
 	b.AssertFileContent("public/page/index.html", "For some moments the old man")
diff --git a/hugolib/collections.go b/hugolib/collections.go
index cf75d373221..a794a986610 100644
--- a/hugolib/collections.go
+++ b/hugolib/collections.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,19 +14,13 @@
 package hugolib
 
 import (
-	"fmt"
-
-	"github.com/gohugoio/hugo/resources/resource"
-
 	"github.com/gohugoio/hugo/common/collections"
+	"github.com/gohugoio/hugo/resources/page"
 )
 
 var (
-	_ collections.Grouper         = (*Page)(nil)
-	_ collections.Slicer          = (*Page)(nil)
-	_ collections.Slicer          = PageGroup{}
-	_ collections.Slicer          = WeightedPage{}
-	_ resource.ResourcesConverter = Pages{}
+	_ collections.Grouper = (*pageState)(nil)
+	_ collections.Slicer  = (*pageState)(nil)
 )
 
 // collections.Slicer implementations below. We keep these bridge implementations
@@ -35,50 +29,8 @@ var (
 
 // Slice is not meant to be used externally. It's a bridge function
 // for the template functions. See collections.Slice.
-func (p *Page) Slice(items interface{}) (interface{}, error) {
-	return toPages(items)
-}
-
-// Slice is not meant to be used externally. It's a bridge function
-// for the template functions. See collections.Slice.
-func (p PageGroup) Slice(in interface{}) (interface{}, error) {
-	switch items := in.(type) {
-	case PageGroup:
-		return items, nil
-	case []interface{}:
-		groups := make(PagesGroup, len(items))
-		for i, v := range items {
-			g, ok := v.(PageGroup)
-			if !ok {
-				return nil, fmt.Errorf("type %T is not a PageGroup", v)
-			}
-			groups[i] = g
-		}
-		return groups, nil
-	default:
-		return nil, fmt.Errorf("invalid slice type %T", items)
-	}
-}
-
-// Slice is not meant to be used externally. It's a bridge function
-// for the template functions. See collections.Slice.
-func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
-	switch items := in.(type) {
-	case WeightedPages:
-		return items, nil
-	case []interface{}:
-		weighted := make(WeightedPages, len(items))
-		for i, v := range items {
-			g, ok := v.(WeightedPage)
-			if !ok {
-				return nil, fmt.Errorf("type %T is not a WeightedPage", v)
-			}
-			weighted[i] = g
-		}
-		return weighted, nil
-	default:
-		return nil, fmt.Errorf("invalid slice type %T", items)
-	}
+func (p *pageState) Slice(items interface{}) (interface{}, error) {
+	return page.ToPages(items)
 }
 
 // collections.Grouper  implementations below
@@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
 // Group creates a PageGroup from a key and a Pages object
 // This method is not meant for external use. It got its non-typed arguments to satisfy
 // a very generic interface in the tpl package.
-func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) {
-	pages, err := toPages(in)
+func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) {
+	pages, err := page.ToPages(in)
 	if err != nil {
 		return nil, err
 	}
-	return PageGroup{Key: key, Pages: pages}, nil
-}
-
-// ToResources wraps resource.ResourcesConverter
-func (pages Pages) ToResources() resource.Resources {
-	r := make(resource.Resources, len(pages))
-	for i, p := range pages {
-		r[i] = p
-	}
-	return r
+	return page.PageGroup{Key: key, Pages: pages}, nil
 }
diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go
index 9cf328a05f6..bc55bdbe8c9 100644
--- a/hugolib/collections_test.go
+++ b/hugolib/collections_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@ title: "Page"
 	b.CreateSites().Build(BuildCfg{})
 
 	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 2)
+	require.Len(t, b.H.Sites[0].RegularPages(), 2)
 
 	b.AssertFileContent("public/index.html", "cool: 2")
 }
@@ -79,12 +79,12 @@ tags_weight: %d
 	b.CreateSites().Build(BuildCfg{})
 
 	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 2)
+	require.Len(t, b.H.Sites[0].RegularPages(), 2)
 
 	b.AssertFileContent("public/index.html",
-		"pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)",
-		"pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)",
-		`weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
+		"pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)",
+		"pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)",
+		`weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
 }
 
 func TestAppendFunc(t *testing.T) {
@@ -129,11 +129,11 @@ tags_weight: %d
 	b.CreateSites().Build(BuildCfg{})
 
 	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 2)
+	require.Len(t, b.H.Sites[0].RegularPages(), 2)
 
 	b.AssertFileContent("public/index.html",
-		"pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)",
-		"appendPages:9:hugolib.Pages:home/page",
+		"pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)",
+		"appendPages:9:page.Pages:home/page",
 		"appendStrings:[]string:[a b c d e]",
 		"appendStringsSlice:[]string:[a b c c d]",
 		"union:[]string:[a b c d e]",
diff --git a/hugolib/config.go b/hugolib/config.go
index 6a1de32beec..50e4ca6ec29 100644
--- a/hugolib/config.go
+++ b/hugolib/config.go
@@ -1,4 +1,4 @@
-// Copyright 2016-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@ import (
 
 	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/gohugoio/hugo/common/hugo"
-	"github.com/gohugoio/hugo/hugofs"
 	"github.com/gohugoio/hugo/hugolib/paths"
 	"github.com/pkg/errors"
 	_errors "github.com/pkg/errors"
@@ -177,14 +176,6 @@ type configLoader struct {
 	ConfigSourceDescriptor
 }
 
-func (l configLoader) wrapFileInfoError(err error, fi os.FileInfo) error {
-	rfi, ok := fi.(hugofs.RealFilenameInfo)
-	if !ok {
-		return err
-	}
-	return l.wrapFileError(err, rfi.RealFilename())
-}
-
 func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) {
 	baseDir := l.configFileDir()
 	var baseFilename string
@@ -240,11 +231,6 @@ func (l configLoader) wrapFileError(err error, filename string) error {
 	return err
 }
 
-func (l configLoader) newRealBaseFs(path string) afero.Fs {
-	return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs))
-
-}
-
 func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) {
 	sourceFs := l.Fs
 	configDir := l.AbsConfigDir
@@ -274,7 +260,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error)
 
 	for _, configDir := range configDirs {
 		err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error {
-			if fi == nil {
+			if fi == nil || err != nil {
 				return nil
 			}
 
@@ -616,8 +602,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
 	v.SetDefault("removePathAccents", false)
 	v.SetDefault("titleCaseStyle", "AP")
 	v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"})
-	v.SetDefault("permalinks", make(PermalinkOverrides, 0))
-	v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"})
+	v.SetDefault("permalinks", make(map[string]string))
+	v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"})
 	v.SetDefault("pygmentsStyle", "monokai")
 	v.SetDefault("pygmentsUseClasses", false)
 	v.SetDefault("pygmentsCodeFences", false)
@@ -625,7 +611,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
 	v.SetDefault("pygmentsOptions", "")
 	v.SetDefault("disableLiveReload", false)
 	v.SetDefault("pluralizeListTitles", true)
-	v.SetDefault("preserveTaxonomyNames", false)
 	v.SetDefault("forceSyncStatic", false)
 	v.SetDefault("footnoteAnchorPrefix", "")
 	v.SetDefault("footnoteReturnLinkContents", "")
diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go
index 6685de4cc61..b65183a8ab0 100644
--- a/hugolib/datafiles_test.go
+++ b/hugolib/datafiles_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
 
 	s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true})
 
-	if !expectBuildError && !reflect.DeepEqual(expected, s.Data) {
+	if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) {
 		// This disabled code detects the situation described in the WARNING message below.
 		// The situation seems to only occur for TOML data with integer values.
 		// Perhaps the TOML parser returns ints in another type.
@@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
 			}
 		*/
 
-		return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data)
+		return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data())
 	}
 
 	return
diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go
index edada141912..f5c093646c2 100644
--- a/hugolib/disableKinds_test.go
+++ b/hugolib/disableKinds_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,6 +18,8 @@ import (
 
 	"fmt"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/deps"
 	"github.com/spf13/afero"
 
@@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) {
 
 func TestDisableKindsSomeDisabled(t *testing.T) {
 	t.Parallel()
-	doTestDisableKinds(t, KindSection, kind404)
+	doTestDisableKinds(t, page.KindSection, kind404)
 }
 
 func TestDisableKindsOneDisabled(t *testing.T) {
 	t.Parallel()
 	for _, kind := range allKinds {
-		if kind == KindPage {
+		if kind == page.KindPage {
 			// Turning off regular page generation have some side-effects
 			// not handled by the assertions below (no sections), so
 			// skip that for now.
@@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
 			if isDisabled {
-				return len(s.RegularPages) == 0
+				return len(s.RegularPages()) == 0
 			}
-			return len(s.RegularPages) > 0
-		}, disabled, KindPage, "public/sect/p1/index.html", "Single|P1")
+			return len(s.RegularPages()) > 0
+		}, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindHome)
+			p := s.getPage(page.KindHome)
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
-		}, disabled, KindHome, "public/index.html", "Home")
+		}, disabled, page.KindHome, "public/index.html", "Home")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindSection, "sect")
+			p := s.getPage(page.KindSection, "sect")
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
-		}, disabled, KindSection, "public/sect/index.html", "Sects")
+		}, disabled, page.KindSection, "public/sect/index.html", "Sects")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindTaxonomy, "tags", "tag1")
+			p := s.getPage(page.KindTaxonomy, "tags", "tag1")
 
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
 
-		}, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
+		}, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindTaxonomyTerm, "tags")
+			p := s.getPage(page.KindTaxonomyTerm, "tags")
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
 
-		}, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags")
+		}, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindTaxonomyTerm, "categories")
+			p := s.getPage(page.KindTaxonomyTerm, "categories")
 
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
 
-		}, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
+		}, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
 	assertDisabledKind(th,
 		func(isDisabled bool) bool {
-			p := s.getPage(KindTaxonomy, "categories", "hugo")
+			p := s.getPage(page.KindTaxonomy, "categories", "hugo")
 			if isDisabled {
 				return p == nil
 			}
 			return p != nil
 
-		}, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
+		}, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
 	// The below have no page in any collection.
 	assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>")
 	assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap")
@@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st
 
 	if kind == kindRSS && !isDisabled {
 		// If the home page is also disabled, there is not RSS to look for.
-		if stringSliceContains(KindHome, disabled...) {
+		if stringSliceContains(page.KindHome, disabled...) {
 			isDisabled = true
 		}
 	}
diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go
index 3a6220b532f..c70380a4b89 100644
--- a/hugolib/embedded_shortcodes_test.go
+++ b/hugolib/embedded_shortcodes_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/spf13/cast"
+
 	"path/filepath"
 
 	"github.com/gohugoio/hugo/deps"
@@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
-	output := string(s.RegularPages[0].content())
+	content, err := s.RegularPages()[0].Content()
+	require.NoError(t, err)
+	output := cast.ToString(content)
 
 	if !strings.Contains(output, expected) {
 		t.Errorf("Got\n%q\nExpected\n%q", output, expected)
diff --git a/hugolib/gitinfo.go b/hugolib/gitinfo.go
index d356fcf075e..6acc47d173f 100644
--- a/hugolib/gitinfo.go
+++ b/hugolib/gitinfo.go
@@ -1,4 +1,4 @@
-// Copyright 2016-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@ import (
 
 	"github.com/bep/gitmap"
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/resources/page"
 )
 
 type gitInfo struct {
@@ -26,15 +27,12 @@ type gitInfo struct {
 	repo       *gitmap.GitRepo
 }
 
-func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) {
-	if g == nil {
-		return nil, false
-	}
-
-	name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir)
+func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo {
+	name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir)
 	name = strings.TrimPrefix(name, "/")
 
-	return g.repo.Files[name], true
+	return g.repo.Files[name]
+
 }
 
 func newGitInfo(cfg config.Provider) (*gitInfo, error) {
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index 9ce1c438e75..486ce885094 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,14 +14,24 @@
 package hugolib
 
 import (
-	"errors"
 	"io"
 	"path/filepath"
 	"sort"
 	"strings"
 	"sync"
 
+	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/parser/metadecoders"
+
+	"github.com/gohugoio/hugo/hugofs"
+
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/source"
+
+	"github.com/bep/gitmap"
 	"github.com/gohugoio/hugo/config"
+	"github.com/spf13/afero"
 
 	"github.com/gohugoio/hugo/publisher"
 
@@ -30,8 +40,10 @@ import (
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/lazy"
 
 	"github.com/gohugoio/hugo/i18n"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/tpl"
 	"github.com/gohugoio/hugo/tpl/tplimpl"
 )
@@ -48,17 +60,96 @@ type HugoSites struct {
 	// If this is running in the dev server.
 	running bool
 
+	// Render output formats for all sites.
+	renderFormats output.Formats
+
 	*deps.Deps
 
+	gitInfo *gitInfo
+
+	// As loaded from the /data dirs
+	data map[string]interface{}
+
 	// Keeps track of bundle directories and symlinks to enable partial rebuilding.
 	ContentChanges *contentChangeMap
 
-	// If enabled, keeps a revision map for all content.
-	gitInfo *gitInfo
+	init *hugoSitesInit
+
+	*fatalErrorHandler
+}
+
+type fatalErrorHandler struct {
+	mu sync.Mutex
+
+	h *HugoSites
+
+	err error
+
+	done  bool
+	donec chan bool // will be closed when done
+}
+
+// FatalError error is used in some rare situations where it does not make sense to
+// continue processing, to abort as soon as possible and log the error.
+func (f *fatalErrorHandler) FatalError(err error) {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	if !f.done {
+		f.done = true
+		close(f.donec)
+	}
+	f.err = err
 }
 
-func (h *HugoSites) siteInfos() SiteInfos {
-	infos := make(SiteInfos, len(h.Sites))
+func (f *fatalErrorHandler) getErr() error {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	return f.err
+}
+
+func (f *fatalErrorHandler) Done() <-chan bool {
+	return f.donec
+}
+
+type hugoSitesInit struct {
+	// Loads the data from all of the /data folders.
+	data *lazy.Init
+
+	// Loads the Git info for all the pages if enabled.
+	gitInfo *lazy.Init
+
+	// Maps page translations.
+	translations *lazy.Init
+}
+
+func (h *hugoSitesInit) Reset() {
+	h.data.Reset()
+	h.gitInfo.Reset()
+	h.translations.Reset()
+}
+
+func (h *HugoSites) Data() map[string]interface{} {
+	if _, err := h.init.data.Do(); err != nil {
+		h.SendError(errors.Wrap(err, "failed to load data"))
+		return nil
+	}
+	return h.data
+}
+
+func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
+	if _, err := h.init.gitInfo.Do(); err != nil {
+		return nil, err
+	}
+
+	if h.gitInfo == nil {
+		return nil, nil
+	}
+
+	return h.gitInfo.forPage(p), nil
+}
+
+func (h *HugoSites) siteInfos() page.Sites {
+	infos := make(page.Sites, len(h.Sites))
 	for i, site := range h.Sites {
 		infos[i] = &site.Info
 	}
@@ -106,7 +197,7 @@ func (h *HugoSites) IsMultihost() bool {
 func (h *HugoSites) LanguageSet() map[string]bool {
 	set := make(map[string]bool)
 	for _, s := range h.Sites {
-		set[s.Language.Lang] = true
+		set[s.language.Lang] = true
 	}
 	return set
 }
@@ -129,14 +220,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
 func (h *HugoSites) langSite() map[string]*Site {
 	m := make(map[string]*Site)
 	for _, s := range h.Sites {
-		m[s.Language.Lang] = s
+		m[s.language.Lang] = s
 	}
 	return m
 }
 
 // GetContentPage finds a Page with content given the absolute filename.
 // Returns nil if none found.
-func (h *HugoSites) GetContentPage(filename string) *Page {
+func (h *HugoSites) GetContentPage(filename string) page.Page {
 	for _, s := range h.Sites {
 		pos := s.rawAllPages.findPagePosByFilename(filename)
 		if pos == -1 {
@@ -178,10 +269,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 		running:      cfg.Running,
 		multilingual: langConfig,
 		multihost:    cfg.Cfg.GetBool("multihost"),
-		Sites:        sites}
+		Sites:        sites,
+		init: &hugoSitesInit{
+			data:         lazy.New(),
+			gitInfo:      lazy.New(),
+			translations: lazy.New(),
+		},
+	}
+
+	h.fatalErrorHandler = &fatalErrorHandler{
+		h:     h,
+		donec: make(chan bool),
+	}
+
+	h.init.data.Add(func() (interface{}, error) {
+		err := h.loadData(h.PathSpec.BaseFs.Data.Fs)
+		return err, nil
+	})
+
+	h.init.translations.Add(func() (interface{}, error) {
+		if len(h.Sites) > 1 {
+			allTranslations := pagesToTranslationsMap(h.Sites)
+			assignTranslationsToPages(allTranslations, h.Sites)
+		}
+
+		return nil, nil
+	})
+
+	h.init.gitInfo.Add(func() (interface{}, error) {
+		err := h.loadGitInfo()
+		return nil, err
+	})
 
 	for _, s := range sites {
-		s.owner = h
+		s.h = h
 	}
 
 	if err := applyDeps(cfg, sites...); err != nil {
@@ -197,14 +318,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 		h.ContentChanges = contentChangeTracker
 	}
 
-	if err := h.initGitInfo(); err != nil {
-		return nil, err
-	}
-
 	return h, nil
 }
 
-func (h *HugoSites) initGitInfo() error {
+func (h *HugoSites) loadGitInfo() error {
 	if h.Cfg.GetBool("enableGitInfo") {
 		gi, err := newGitInfo(h.Cfg)
 		if err != nil {
@@ -247,16 +364,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
 
 			d.Site = &s.Info
 
-			siteConfig, err := loadSiteConfig(s.Language)
+			siteConfig, err := loadSiteConfig(s.language)
 			if err != nil {
 				return err
 			}
-			s.siteConfig = siteConfig
-			s.siteRefLinker, err = newSiteRefLinker(s.Language, s)
+			s.siteConfigConfig = siteConfig
+			s.siteRefLinker, err = newSiteRefLinker(s.language, s)
 			return err
 		}
 
-		cfg.Language = s.Language
+		cfg.Language = s.language
 		cfg.MediaTypes = s.mediaTypesConfig
 		cfg.OutputFormats = s.outputFormatsConfig
 
@@ -347,11 +464,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
 	return sites, nil
 }
 
-// Reset resets the sites and template caches, making it ready for a full rebuild.
-func (h *HugoSites) reset() {
-	for i, s := range h.Sites {
-		h.Sites[i] = s.reset()
+// Reset resets the sites and template caches etc., making it ready for a full rebuild.
+func (h *HugoSites) reset(config *BuildCfg) {
+	if config.ResetState {
+		for i, s := range h.Sites {
+			h.Sites[i] = s.reset()
+			if r, ok := s.Fs.Destination.(hugofs.Reseter); ok {
+				r.Reset()
+			}
+		}
 	}
+
+	h.fatalErrorHandler = &fatalErrorHandler{
+		h:     h,
+		donec: make(chan bool),
+	}
+
+	h.init.Reset()
 }
 
 // resetLogs resets the log counters etc. Used to do a new build on the same sites.
@@ -387,7 +516,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
 	h.Sites = sites
 
 	for _, s := range sites {
-		s.owner = h
+		s.h = h
 	}
 
 	if err := applyDeps(depsCfg, sites...); err != nil {
@@ -435,7 +564,7 @@ type BuildCfg struct {
 // Note that a page does not have to have a content page / file.
 // For regular builds, this will allways return true.
 // TODO(bep) rename/work this.
-func (cfg *BuildCfg) shouldRender(p *Page) bool {
+func (cfg *BuildCfg) shouldRender(p *pageState) bool {
 	if p.forceRender {
 		p.forceRender = false
 		return true
@@ -445,15 +574,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool {
 		return true
 	}
 
-	if cfg.RecentlyVisited[p.RelPermalink()] {
-		if cfg.PartialReRender {
-			_ = p.initMainOutputFormat()
-		}
-		return true
-	}
-
-	if cfg.whatChanged != nil && p.File != nil {
-		return cfg.whatChanged.files[p.File.Filename()]
+	if cfg.whatChanged != nil && p.File() != nil {
+		return cfg.whatChanged.files[p.File().Filename()]
 	}
 
 	return false
@@ -477,100 +599,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
 		return nil
 	}
 
-	// TODO(bep) DRY
-	sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))
-
 	s := h.Sites[0]
 
 	smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
 
 	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
-		sitemapDefault.Filename, h.toSiteInfos(), smLayouts...)
-}
-
-func (h *HugoSites) assignMissingTranslations() error {
-
-	// This looks heavy, but it should be a small number of nodes by now.
-	allPages := h.findAllPagesByKindNotIn(KindPage)
-	for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
-		nodes := h.findPagesByKindIn(nodeType, allPages)
-
-		// Assign translations
-		for _, t1 := range nodes {
-			for _, t2 := range nodes {
-				if t1.isNewTranslation(t2) {
-					t1.translations = append(t1.translations, t2)
-				}
-			}
-		}
-	}
-
-	// Now we can sort the translations.
-	for _, p := range allPages {
-		if len(p.translations) > 0 {
-			pageBy(languagePageSort).Sort(p.translations)
-		}
-	}
-	return nil
-
+		s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
 }
 
 // createMissingPages creates home page, taxonomies etc. that isnt't created as an
 // effect of having a content file.
 func (h *HugoSites) createMissingPages() error {
-	var newPages Pages
+	var newPages pageStatePages
 
 	for _, s := range h.Sites {
-		if s.isEnabled(KindHome) {
+		if s.isEnabled(page.KindHome) {
 			// home pages
-			home := s.findPagesByKind(KindHome)
-			if len(home) > 1 {
+			homes := s.findWorkPagesByKind(page.KindHome)
+			if len(homes) > 1 {
 				panic("Too many homes")
 			}
-			if len(home) == 0 {
-				n := s.newHomePage()
-				s.Pages = append(s.Pages, n)
-				newPages = append(newPages, n)
+			var home *pageState
+			if len(homes) == 0 {
+				home = s.newPage(page.KindHome)
+				s.workAllPages = append(s.workAllPages, home)
+				newPages = append(newPages, home)
+			} else {
+				home = homes[0]
 			}
+
+			s.home = home
 		}
 
 		// Will create content-less root sections.
 		newSections := s.assembleSections()
-		s.Pages = append(s.Pages, newSections...)
+		s.workAllPages = append(s.workAllPages, newSections...)
 		newPages = append(newPages, newSections...)
 
+		taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
+		taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
+
 		// taxonomy list and terms pages
-		taxonomies := s.Language.GetStringMapString("taxonomies")
+		taxonomies := s.Language().GetStringMapString("taxonomies")
 		if len(taxonomies) > 0 {
-			taxonomyPages := s.findPagesByKind(KindTaxonomy)
-			taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm)
+			taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
+			taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
+
+			// Make them navigable from WeightedPage etc.
+			for _, p := range taxonomyPages {
+				p.getTaxonomyNodeInfo().TransferValues(p)
+			}
+			for _, p := range taxonomyTermsPages {
+				p.getTaxonomyNodeInfo().TransferValues(p)
+			}
+
 			for _, plural := range taxonomies {
-				if s.isEnabled(KindTaxonomyTerm) {
+				if taxonomyTermEnabled {
 					foundTaxonomyTermsPage := false
 					for _, p := range taxonomyTermsPages {
-						if p.sectionsPath() == plural {
+						if p.SectionsPath() == plural {
 							foundTaxonomyTermsPage = true
 							break
 						}
 					}
 
 					if !foundTaxonomyTermsPage {
-						n := s.newTaxonomyTermsPage(plural)
-						s.Pages = append(s.Pages, n)
+						n := s.newPage(page.KindTaxonomyTerm, plural)
+						n.getTaxonomyNodeInfo().TransferValues(n)
+						s.workAllPages = append(s.workAllPages, n)
 						newPages = append(newPages, n)
 					}
 				}
 
-				if s.isEnabled(KindTaxonomy) {
-					for key := range s.Taxonomies[plural] {
+				if taxonomyEnabled {
+					for termKey := range s.Taxonomies[plural] {
+
 						foundTaxonomyPage := false
-						origKey := key
 
-						if s.Info.preserveTaxonomyNames {
-							key = s.PathSpec.MakePathSanitized(key)
-						}
 						for _, p := range taxonomyPages {
-							sectionsPath := p.sectionsPath()
+							sectionsPath := p.SectionsPath()
 
 							if !strings.HasPrefix(sectionsPath, plural) {
 								continue
@@ -579,20 +686,21 @@ func (h *HugoSites) createMissingPages() error {
 							singularKey := strings.TrimPrefix(sectionsPath, plural)
 							singularKey = strings.TrimPrefix(singularKey, "/")
 
-							// Some people may have /authors/MaxMustermann etc. as paths.
-							// p.sections contains the raw values from the file system.
-							// See https://github.com/gohugoio/hugo/issues/4238
-							singularKey = s.PathSpec.MakePathSanitized(singularKey)
-
-							if singularKey == key {
+							if singularKey == termKey {
 								foundTaxonomyPage = true
 								break
 							}
 						}
 
 						if !foundTaxonomyPage {
-							n := s.newTaxonomyPage(plural, origKey)
-							s.Pages = append(s.Pages, n)
+							info := s.taxonomyNodes.Get(plural, termKey)
+							if info == nil {
+								panic("no info found")
+							}
+
+							n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
+							info.TransferValues(n)
+							s.workAllPages = append(s.workAllPages, n)
 							newPages = append(newPages, n)
 						}
 					}
@@ -601,24 +709,6 @@ func (h *HugoSites) createMissingPages() error {
 		}
 	}
 
-	if len(newPages) > 0 {
-		// This resorting is unfortunate, but it also needs to be sorted
-		// when sections are created.
-		first := h.Sites[0]
-
-		first.AllPages = append(first.AllPages, newPages...)
-
-		first.AllPages.sort()
-
-		for _, s := range h.Sites {
-			s.Pages.sort()
-		}
-
-		for i := 1; i < len(h.Sites); i++ {
-			h.Sites[i].AllPages = first.AllPages
-		}
-	}
-
 	return nil
 }
 
@@ -628,61 +718,58 @@ func (h *HugoSites) removePageByFilename(filename string) {
 	}
 }
 
-func (h *HugoSites) setupTranslations() {
+func (h *HugoSites) createPageCollections() error {
 	for _, s := range h.Sites {
 		for _, p := range s.rawAllPages {
-			if p.Kind == kindUnknown {
-				p.Kind = p.kindFromSections()
-			}
-
-			if !p.s.isEnabled(p.Kind) {
+			if !s.isEnabled(p.Kind()) {
 				continue
 			}
 
-			shouldBuild := p.shouldBuild()
-			s.updateBuildStats(p)
+			shouldBuild := s.shouldBuild(p)
+			s.buildStats.update(p)
 			if shouldBuild {
-				if p.headless {
+				if p.m.headless {
 					s.headlessPages = append(s.headlessPages, p)
 				} else {
-					s.Pages = append(s.Pages, p)
+					s.workAllPages = append(s.workAllPages, p)
 				}
 			}
 		}
 	}
 
-	allPages := make(Pages, 0)
+	allPages := newLazyPagesFactory(func() page.Pages {
+		var pages page.Pages
+		for _, s := range h.Sites {
+			pages = append(pages, s.Pages()...)
+		}
 
-	for _, s := range h.Sites {
-		allPages = append(allPages, s.Pages...)
-	}
+		page.SortByDefault(pages)
 
-	allPages.sort()
+		return pages
+	})
 
-	for _, s := range h.Sites {
-		s.AllPages = allPages
-	}
+	allRegularPages := newLazyPagesFactory(func() page.Pages {
+		return h.findPagesByKindIn(page.KindPage, allPages.get())
+	})
 
-	// Pull over the collections from the master site
-	for i := 1; i < len(h.Sites); i++ {
-		h.Sites[i].Data = h.Sites[0].Data
+	for _, s := range h.Sites {
+		s.PageCollections.allPages = allPages
+		s.PageCollections.allRegularPages = allRegularPages
 	}
 
-	if len(h.Sites) > 1 {
-		allTranslations := pagesToTranslationsMap(allPages)
-		assignTranslationsToPages(allTranslations, allPages)
-	}
+	return nil
 }
 
-func (s *Site) preparePagesForRender(start bool) error {
-	for _, p := range s.Pages {
-		if err := p.prepareForRender(start); err != nil {
+func (s *Site) preparePagesForRender(idx int) error {
+
+	for _, p := range s.workAllPages {
+		if err := p.initOutputFormat(idx); err != nil {
 			return err
 		}
 	}
 
 	for _, p := range s.headlessPages {
-		if err := p.prepareForRender(start); err != nil {
+		if err := p.initOutputFormat(idx); err != nil {
 			return err
 		}
 	}
@@ -691,62 +778,141 @@ func (s *Site) preparePagesForRender(start bool) error {
 }
 
 // Pages returns all pages for all sites.
-func (h *HugoSites) Pages() Pages {
-	return h.Sites[0].AllPages
+func (h *HugoSites) Pages() page.Pages {
+	return h.Sites[0].AllPages()
 }
 
-func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) {
-	if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 {
-		p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName())
-		err := p.shortcodeState.executeShortcodesForDelta(p)
+func (h *HugoSites) loadData(fs afero.Fs) (err error) {
+	spec := source.NewSourceSpec(h.PathSpec, fs)
+	fileSystem := spec.NewFilesystem("")
+	h.data = make(map[string]interface{})
+	for _, r := range fileSystem.Files() {
+		if err := h.handleDataFile(r); err != nil {
+			return err
+		}
+	}
 
-		if err != nil {
+	return
+}
+
+func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
+	var current map[string]interface{}
 
-			return rawContentCopy, err
+	f, err := r.Open()
+	if err != nil {
+		return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
+	}
+	defer f.Close()
+
+	// Crawl in data tree to insert data
+	current = h.data
+	keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
+	// The first path element is the virtual folder (typically theme name), which is
+	// not part of the key.
+	if len(keyParts) > 1 {
+		for _, key := range keyParts[1:] {
+			if key != "" {
+				if _, ok := current[key]; !ok {
+					current[key] = make(map[string]interface{})
+				}
+				current = current[key].(map[string]interface{})
+			}
 		}
+	}
 
-		rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes)
+	data, err := h.readData(r)
+	if err != nil {
+		return h.errWithFileContext(err, r)
+	}
 
-		if err != nil {
-			p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error())
+	if data == nil {
+		return nil
+	}
+
+	// filepath.Walk walks the files in lexical order, '/' comes before '.'
+	// this warning could happen if
+	// 1. A theme uses the same key; the main data folder wins
+	// 2. A sub folder uses the same key: the sub folder wins
+	higherPrecedentData := current[r.BaseFileName()]
+
+	switch data.(type) {
+	case nil:
+		// hear the crickets?
+
+	case map[string]interface{}:
+
+		switch higherPrecedentData.(type) {
+		case nil:
+			current[r.BaseFileName()] = data
+		case map[string]interface{}:
+			// merge maps: insert entries from data for keys that
+			// don't already exist in higherPrecedentData
+			higherPrecedentMap := higherPrecedentData.(map[string]interface{})
+			for key, value := range data.(map[string]interface{}) {
+				if _, exists := higherPrecedentMap[key]; exists {
+					h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
+				} else {
+					higherPrecedentMap[key] = value
+				}
+			}
+		default:
+			// can't merge: higherPrecedentData is not a map
+			h.Log.WARN.Printf("The %T data from '%s' overridden by "+
+				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
+		}
+
+	case []interface{}:
+		if higherPrecedentData == nil {
+			current[r.BaseFileName()] = data
+		} else {
+			// we don't merge array data
+			h.Log.WARN.Printf("The %T data from '%s' overridden by "+
+				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
 		}
+
+	default:
+		h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName())
 	}
 
-	return rawContentCopy, nil
+	return nil
 }
 
-func (s *Site) updateBuildStats(page *Page) {
-	if page.IsDraft() {
-		s.draftCount++
+func (h *HugoSites) errWithFileContext(err error, f source.File) error {
+	rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
+	if !ok {
+		return err
 	}
 
-	if page.IsFuture() {
-		s.futureCount++
-	}
+	realFilename := rfi.RealFilename()
 
-	if page.IsExpired() {
-		s.expiredCount++
-	}
-}
+	err, _ = herrors.WithFileContextForFile(
+		err,
+		realFilename,
+		realFilename,
+		h.SourceSpec.Fs.Source,
+		herrors.SimpleLineMatcher)
 
-func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages {
-	return h.Sites[0].findPagesByKindNotIn(kind, inPages)
+	return err
 }
 
-func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages {
-	return h.Sites[0].findPagesByKindIn(kind, inPages)
-}
+func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) {
+	file, err := f.Open()
+	if err != nil {
+		return nil, errors.Wrap(err, "readData: failed to open data file")
+	}
+	defer file.Close()
+	content := helpers.ReaderToBytes(file)
 
-func (h *HugoSites) findAllPagesByKind(kind string) Pages {
-	return h.findPagesByKindIn(kind, h.Sites[0].AllPages)
+	format := metadecoders.FormatFromString(f.Extension())
+	return metadecoders.Default.Unmarshal(content, format)
 }
 
-func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
-	return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
+func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
+	return h.Sites[0].findPagesByKindIn(kind, inPages)
 }
 
-func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
-	var pages Pages
+func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages {
+	var pages page.Pages
 	for _, s := range h.Sites {
 		pages = append(pages, s.findPagesByShortcode(shortcode)...)
 	}
diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go
index ec5070fa814..214f72c5f64 100644
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -1,4 +1,4 @@
-// Copyright 2016-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,7 +15,12 @@ package hugolib
 
 import (
 	"bytes"
+	"context"
 	"fmt"
+	"runtime/trace"
+	"sort"
+
+	"github.com/gohugoio/hugo/output"
 
 	"errors"
 
@@ -26,6 +31,9 @@ import (
 // Build builds all sites. If filesystem events are provided,
 // this is considered to be a potential partial rebuild.
 func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
+	ctx, task := trace.NewTask(context.Background(), "Build")
+	defer task.End()
+
 	errCollector := h.StartErrorCollector()
 	errs := make(chan error)
 
@@ -71,22 +79,36 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 					return err
 				}
 			} else {
-				if err := h.init(conf); err != nil {
+				if err := h.initSites(conf); err != nil {
 					return err
 				}
 			}
 
-			if err := h.process(conf, events...); err != nil {
+			var err error
+
+			f := func() {
+				err = h.process(conf, events...)
+			}
+			trace.WithRegion(ctx, "process", f)
+			if err != nil {
 				return err
 			}
 
-			if err := h.assemble(conf); err != nil {
+			f = func() {
+				err = h.assemble(conf)
+			}
+			trace.WithRegion(ctx, "assemble", f)
+			if err != nil {
 				return err
 			}
+
 			return nil
 		}
 
-		prepareErr = prepare()
+		f := func() {
+			prepareErr = prepare()
+		}
+		trace.WithRegion(ctx, "prepare", f)
 		if prepareErr != nil {
 			h.SendError(prepareErr)
 		}
@@ -94,7 +116,12 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 	}
 
 	if prepareErr == nil {
-		if err := h.render(conf); err != nil {
+		var err error
+		f := func() {
+			err = h.render(conf)
+		}
+		trace.WithRegion(ctx, "render", f)
+		if err != nil {
 			h.SendError(err)
 		}
 	}
@@ -120,6 +147,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 		return err
 	}
 
+	if err := h.fatalErrorHandler.getErr(); err != nil {
+		return err
+	}
+
 	errorCount := h.Log.ErrorCounter.Count()
 	if errorCount > 0 {
 		return fmt.Errorf("logged %d error(s)", errorCount)
@@ -132,17 +163,8 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 // Build lifecycle methods below.
 // The order listed matches the order of execution.
 
-func (h *HugoSites) init(config *BuildCfg) error {
-
-	for _, s := range h.Sites {
-		if s.PageCollections == nil {
-			s.PageCollections = newPageCollections()
-		}
-	}
-
-	if config.ResetState {
-		h.reset()
-	}
+func (h *HugoSites) initSites(config *BuildCfg) error {
+	h.reset(config)
 
 	if config.NewConfig != nil {
 		if err := h.createSitesFromConfig(config.NewConfig); err != nil {
@@ -155,28 +177,22 @@ func (h *HugoSites) init(config *BuildCfg) error {
 
 func (h *HugoSites) initRebuild(config *BuildCfg) error {
 	if config.NewConfig != nil {
-		return errors.New("Rebuild does not support 'NewConfig'.")
+		return errors.New("rebuild does not support 'NewConfig'")
 	}
 
 	if config.ResetState {
-		return errors.New("Rebuild does not support 'ResetState'.")
+		return errors.New("rebuild does not support 'ResetState'")
 	}
 
 	if !h.running {
-		return errors.New("Rebuild called when not in watch mode")
-	}
-
-	if config.whatChanged.source {
-		// This is for the non-renderable content pages (rarely used, I guess).
-		// We could maybe detect if this is really needed, but it should be
-		// pretty fast.
-		h.TemplateHandler().RebuildClone()
+		return errors.New("rebuild called when not in watch mode")
 	}
 
 	for _, s := range h.Sites {
 		s.resetBuildState()
 	}
 
+	h.reset(config)
 	h.resetLogs()
 	helpers.InitLoggers()
 
@@ -203,14 +219,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
 }
 
 func (h *HugoSites) assemble(config *BuildCfg) error {
-	if config.whatChanged.source {
-		for _, s := range h.Sites {
-			s.createTaxonomiesEntries()
-		}
-	}
-
-	// TODO(bep) we could probably wait and do this in one go later
-	h.setupTranslations()
 
 	if len(h.Sites) > 1 {
 		// The first is initialized during process; initialize the rest
@@ -221,47 +229,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
 		}
 	}
 
+	if err := h.createPageCollections(); err != nil {
+		return err
+	}
+
 	if config.whatChanged.source {
 		for _, s := range h.Sites {
-			if err := s.buildSiteMeta(); err != nil {
+			if err := s.assembleTaxonomies(); err != nil {
 				return err
 			}
 		}
 	}
 
+	// Create pages for the section pages etc. without content file.
 	if err := h.createMissingPages(); err != nil {
 		return err
 	}
 
 	for _, s := range h.Sites {
-		for _, pages := range []Pages{s.Pages, s.headlessPages} {
-			for _, p := range pages {
-				// May have been set in front matter
-				if len(p.outputFormats) == 0 {
-					p.outputFormats = s.outputFormats[p.Kind]
-				}
-
-				if p.headless {
-					// headless = 1 output format only
-					p.outputFormats = p.outputFormats[:1]
-				}
-				for _, r := range p.Resources.ByType(pageResourceType) {
-					r.(*Page).outputFormats = p.outputFormats
-				}
-
-				if err := p.initPaths(); err != nil {
-					return err
-				}
-
-			}
-		}
-		s.assembleMenus()
-		s.refreshPageCaches()
 		s.setupSitePages()
-	}
-
-	if err := h.assignMissingTranslations(); err != nil {
-		return err
+		sort.Stable(s.workAllPages)
 	}
 
 	return nil
@@ -269,42 +256,60 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
 }
 
 func (h *HugoSites) render(config *BuildCfg) error {
+	siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost}
+
 	if !config.PartialReRender {
+		h.renderFormats = output.Formats{}
 		for _, s := range h.Sites {
 			s.initRenderFormats()
+			h.renderFormats = append(h.renderFormats, s.renderFormats...)
 		}
 	}
 
+	i := 0
 	for _, s := range h.Sites {
-		for i, rf := range s.renderFormats {
-			for _, s2 := range h.Sites {
-				// We render site by site, but since the content is lazily rendered
-				// and a site can "borrow" content from other sites, every site
-				// needs this set.
-				s2.rc = &siteRenderingContext{Format: rf}
-
-				isRenderingSite := s == s2
-
-				if !config.PartialReRender {
-					if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil {
-						return err
+		for siteOutIdx, renderFormat := range s.renderFormats {
+			siteRenderContext.outIdx = siteOutIdx
+			siteRenderContext.sitesOutIdx = i
+			i++
+
+			select {
+			case <-h.Done():
+				return nil
+			default:
+				// For the non-renderable pages, we use the content itself as
+				// template and we may have to re-parse and execute it for
+				// each output format.
+				h.TemplateHandler().RebuildClone()
+
+				for _, s2 := range h.Sites {
+					// We render site by site, but since the content is lazily rendered
+					// and a site can "borrow" content from other sites, every site
+					// needs this set.
+					s2.rc = &siteRenderingContext{Format: renderFormat}
+
+					if !config.PartialReRender {
+						if err := s2.preparePagesForRender(siteRenderContext.sitesOutIdx); err != nil {
+							return err
+						}
 					}
 				}
 
-			}
-
-			if !config.SkipRender {
-				if config.PartialReRender {
-					if err := s.renderPages(config); err != nil {
-						return err
-					}
-				} else {
-					if err := s.render(config, i); err != nil {
-						return err
+				if !config.SkipRender {
+					if config.PartialReRender {
+						if err := s.renderPages(siteRenderContext); err != nil {
+							return err
+						}
+					} else {
+						if err := s.render(siteRenderContext); err != nil {
+							return err
+						}
 					}
 				}
 			}
+
 		}
+
 	}
 
 	if !config.SkipRender {
diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go
index dd80946e8ac..6fe4901a1c8 100644
--- a/hugolib/hugo_sites_build_errors_test.go
+++ b/hugolib/hugo_sites_build_errors_test.go
@@ -7,6 +7,9 @@ import (
 	"runtime"
 	"strings"
 	"testing"
+	"time"
+
+	"github.com/fortytw2/leaktest"
 
 	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/stretchr/testify/require"
@@ -20,25 +23,24 @@ type testSiteBuildErrorAsserter struct {
 func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext {
 	t.assert.NotNil(err, t.name)
 	ferr := herrors.UnwrapErrorWithFileContext(err)
-	t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, trace()))
+	t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, stackTrace()))
 	return ferr
 }
 
 func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) {
 	fe := t.getFileError(err)
-	t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s]  got => %s\n%s", t.name, fe, trace()))
+	t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s]  got => %s\n%s", t.name, fe, stackTrace()))
 }
 
 func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) {
 	// The error message will contain filenames with OS slashes. Normalize before compare.
 	e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2)
-	t.assert.Contains(e2, e1, trace())
+	t.assert.Contains(e2, e1, stackTrace())
 
 }
 
 func TestSiteBuildErrors(t *testing.T) {
 	t.Parallel()
-	assert := require.New(t)
 
 	const (
 		yamlcontent = "yamlcontent"
@@ -88,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertCreateError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(5, fe.Position().LineNumber)
-				assert.Equal(1, fe.Position().ColumnNumber)
-				assert.Equal("go-html-template", fe.ChromaLexer)
+				a.assert.Equal(5, fe.Position().LineNumber)
+				a.assert.Equal(1, fe.Position().ColumnNumber)
+				a.assert.Equal("go-html-template", fe.ChromaLexer)
 				a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error())
 
 			},
@@ -103,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(5, fe.Position().LineNumber)
-				assert.Equal(14, fe.Position().ColumnNumber)
-				assert.Equal("go-html-template", fe.ChromaLexer)
+				a.assert.Equal(5, fe.Position().LineNumber)
+				a.assert.Equal(14, fe.Position().ColumnNumber)
+				a.assert.Equal("go-html-template", fe.ChromaLexer)
 				a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
 
 			},
@@ -118,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(5, fe.Position().LineNumber)
-				assert.Equal(14, fe.Position().ColumnNumber)
-				assert.Equal("go-html-template", fe.ChromaLexer)
+				a.assert.Equal(5, fe.Position().LineNumber)
+				a.assert.Equal(14, fe.Position().ColumnNumber)
+				a.assert.Equal("go-html-template", fe.ChromaLexer)
 				a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
 
 			},
@@ -143,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(7, fe.Position().LineNumber)
-				assert.Equal("md", fe.ChromaLexer)
+				a.assert.Equal(7, fe.Position().LineNumber)
+				a.assert.Equal("md", fe.ChromaLexer)
 				// Make sure that it contains both the content file and template
 				a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error())
 				a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error())
@@ -158,10 +160,10 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(7, fe.Position().LineNumber)
-				assert.Equal(14, fe.Position().ColumnNumber)
-				assert.Equal("md", fe.ChromaLexer)
-				a.assertErrorMessage("\"content/myyaml.md:7:14\": failed to extract shortcode: template for shortcode \"nono\" not found", fe.Error())
+				a.assert.Equal(7, fe.Position().LineNumber)
+				a.assert.Equal(10, fe.Position().ColumnNumber)
+				a.assert.Equal("md", fe.ChromaLexer)
+				a.assertErrorMessage(`"content/myyaml.md:7:10": failed to extract shortcode: template for shortcode "nono" not found`, fe.Error())
 			},
 		},
 		{
@@ -182,8 +184,8 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
-				assert.Equal(6, fe.Position().LineNumber)
-				assert.Equal("toml", fe.ErrorContext.ChromaLexer)
+				a.assert.Equal(6, fe.Position().LineNumber)
+				a.assert.Equal("toml", fe.ErrorContext.ChromaLexer)
 
 			},
 		},
@@ -196,8 +198,8 @@ func TestSiteBuildErrors(t *testing.T) {
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				fe := a.getFileError(err)
 
-				assert.Equal(3, fe.Position().LineNumber)
-				assert.Equal("json", fe.ErrorContext.ChromaLexer)
+				a.assert.Equal(3, fe.Position().LineNumber)
+				a.assert.Equal("json", fe.ErrorContext.ChromaLexer)
 
 			},
 		},
@@ -210,42 +212,43 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
-				assert.Error(err)
+				a.assert.Error(err)
 				// This is fixed in latest Go source
 				if regexp.MustCompile("devel|12").MatchString(runtime.Version()) {
 					fe := a.getFileError(err)
-					assert.Equal(5, fe.Position().LineNumber)
-					assert.Equal(21, fe.Position().ColumnNumber)
+					a.assert.Equal(5, fe.Position().LineNumber)
+					a.assert.Equal(21, fe.Position().ColumnNumber)
 				} else {
-					assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
+					a.assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
 				}
 			},
 		},
 	}
 
 	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			assert := require.New(t)
+			errorAsserter := testSiteBuildErrorAsserter{
+				assert: assert,
+				name:   test.name,
+			}
 
-		errorAsserter := testSiteBuildErrorAsserter{
-			assert: assert,
-			name:   test.name,
-		}
+			b := newTestSitesBuilder(t).WithSimpleConfigFile()
 
-		b := newTestSitesBuilder(t).WithSimpleConfigFile()
+			f := func(fileType, content string) string {
+				if fileType != test.fileType {
+					return content
+				}
+				return test.fileFixer(content)
 
-		f := func(fileType, content string) string {
-			if fileType != test.fileType {
-				return content
 			}
-			return test.fileFixer(content)
 
-		}
-
-		b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
+			b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
 SHORTCODE L2
 SHORTCODE L3:
 SHORTCODE L4: {{ .Page.Title }}
 `))
-		b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
+			b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
 BASEOF L2
 BASEOF L3
 BASEOF L4{{ if .Title }}{{ end }}
@@ -253,7 +256,7 @@ BASEOF L4{{ if .Title }}{{ end }}
 BASEOF L6
 `))
 
-		b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
+			b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
 SINGLE L2:
 SINGLE L3:
 SINGLE L4:
@@ -261,7 +264,7 @@ SINGLE L5: {{ .Title }} {{ .Content }}
 {{ end }}
 `))
 
-		b.WithContent("myyaml.md", f(yamlcontent, `---
+			b.WithContent("myyaml.md", f(yamlcontent, `---
 title: "The YAML"
 ---
 
@@ -275,7 +278,7 @@ The end.
 
 `))
 
-		b.WithContent("mytoml.md", f(tomlcontent, `+++
+			b.WithContent("mytoml.md", f(tomlcontent, `+++
 title = "The TOML"
 p1 = "v"
 p2 = "v"
@@ -288,7 +291,7 @@ Some content.
 
 `))
 
-		b.WithContent("myjson.md", f(jsoncontent, `{
+			b.WithContent("myjson.md", f(jsoncontent, `{
 	"title": "This is a title",
 	"description": "This is a description."
 }
@@ -298,26 +301,30 @@ Some content.
 
 `))
 
-		createErr := b.CreateSitesE()
-		if test.assertCreateError != nil {
-			test.assertCreateError(errorAsserter, createErr)
-		} else {
-			assert.NoError(createErr)
-		}
-
-		if createErr == nil {
-			buildErr := b.BuildE(BuildCfg{})
-			if test.assertBuildError != nil {
-				test.assertBuildError(errorAsserter, buildErr)
+			createErr := b.CreateSitesE()
+			if test.assertCreateError != nil {
+				test.assertCreateError(errorAsserter, createErr)
 			} else {
-				assert.NoError(buildErr)
+				assert.NoError(createErr)
 			}
-		}
+
+			if createErr == nil {
+				buildErr := b.BuildE(BuildCfg{})
+				if test.assertBuildError != nil {
+					test.assertBuildError(errorAsserter, buildErr)
+				} else {
+					assert.NoError(buildErr)
+				}
+			}
+		})
 	}
 }
 
 // https://github.com/gohugoio/hugo/issues/5375
 func TestSiteBuildTimeout(t *testing.T) {
+	if !isCI() {
+		defer leaktest.CheckTimeout(t, 10*time.Second)()
+	}
 
 	b := newTestSitesBuilder(t)
 	b.WithConfigFile("toml", `
@@ -342,6 +349,6 @@ title: "A page"
 
 	}
 
-	b.CreateSites().Build(BuildCfg{})
+	b.CreateSites().BuildFail(BuildCfg{})
 
 }
diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go
index 83b96b7f4e7..88a20652979 100644
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -1,16 +1,16 @@
 package hugolib
 
 import (
-	"bytes"
 	"fmt"
 	"strings"
 	"testing"
 
-	"html/template"
 	"os"
 	"path/filepath"
 	"time"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/fortytw2/leaktest"
 	"github.com/fsnotify/fsnotify"
 	"github.com/gohugoio/hugo/helpers"
@@ -66,8 +66,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
 
 	assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true))
 
-	doc1en := enSite.RegularPages[0]
-	doc1fr := frSite.RegularPages[0]
+	doc1en := enSite.RegularPages()[0]
+	doc1fr := frSite.RegularPages()[0]
 
 	enPerm := doc1en.Permalink()
 	enRelPerm := doc1en.RelPermalink()
@@ -100,7 +100,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
 	// Check list pages
 	b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour")
 	b.AssertFileContent("public/en/sect/index.html", "List", "Hello")
-	b.AssertFileContent(pathMod("public/fr/plaques/frtag1/index.html"), "Taxonomy List", "Bonjour")
+	b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour")
 	b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello")
 
 	// Check sitemaps
@@ -126,8 +126,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
 		pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`))
 	b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`)
 	b.AssertFileContent(
-		pathMod("public/fr/plaques/frtag1/index.xml"),
-		pathMod(`<atom:link href="http://example.com/blog/fr/plaques/frtag1/index.xml"`))
+		pathMod("public/fr/plaques/FRtag1/index.xml"),
+		pathMod(`<atom:link href="http://example.com/blog/fr/plaques/FRtag1/index.xml"`))
 	b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`)
 
 	// Check paginators
@@ -140,12 +140,12 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
 	b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/"))
 	b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/")
 	b.AssertFileContent(
-		pathMod("public/fr/plaques/frtag1/page/1/index.html"),
-		pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/frtag1/"`))
+		pathMod("public/fr/plaques/FRtag1/page/1/index.html"),
+		pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/FRtag1/"`))
 	b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`)
 	b.AssertFileContent(
-		pathMod("public/fr/plaques/frtag1/page/2/index.html"), "List Page 2", "Bonjour",
-		pathMod("http://example.com/blog/fr/plaques/frtag1/"))
+		pathMod("public/fr/plaques/FRtag1/page/2/index.html"), "List Page 2", "Bonjour",
+		pathMod("http://example.com/blog/fr/plaques/FRtag1/"))
 	b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/")
 	// nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian)
 	b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`)
@@ -183,12 +183,12 @@ p1 = "p1en"
 	assert.Len(sites, 2)
 
 	nnSite := sites[0]
-	nnHome := nnSite.getPage(KindHome)
+	nnHome := nnSite.getPage(page.KindHome)
 	assert.Len(nnHome.AllTranslations(), 2)
 	assert.Len(nnHome.Translations(), 1)
 	assert.True(nnHome.IsTranslated())
 
-	enHome := sites[1].getPage(KindHome)
+	enHome := sites[1].getPage(page.KindHome)
 
 	p1, err := enHome.Param("p1")
 	assert.NoError(err)
@@ -199,9 +199,7 @@ p1 = "p1en"
 	assert.Equal("p1nn", p1)
 }
 
-//
 func TestMultiSitesBuild(t *testing.T) {
-	t.Parallel()
 
 	for _, config := range []struct {
 		content string
@@ -211,7 +209,11 @@ func TestMultiSitesBuild(t *testing.T) {
 		{multiSiteYAMLConfigTemplate, "yml"},
 		{multiSiteJSONConfigTemplate, "json"},
 	} {
-		doTestMultiSitesBuild(t, config.content, config.suffix)
+
+		t.Run(config.suffix, func(t *testing.T) {
+			t.Parallel()
+			doTestMultiSitesBuild(t, config.content, config.suffix)
+		})
 	}
 }
 
@@ -228,64 +230,51 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 
 	// Check site config
 	for _, s := range sites {
-		require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.Title)
+		require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.title)
 		require.NotNil(t, s.disabledKinds)
 	}
 
 	gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
 	require.NotNil(t, gp1)
-	require.Equal(t, "doc1", gp1.title)
+	require.Equal(t, "doc1", gp1.Title())
 	gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
 	require.Nil(t, gp2)
 
 	enSite := sites[0]
-	enSiteHome := enSite.getPage(KindHome)
+	enSiteHome := enSite.getPage(page.KindHome)
 	require.True(t, enSiteHome.IsTranslated())
 
-	require.Equal(t, "en", enSite.Language.Lang)
-
-	assert.Equal(5, len(enSite.RegularPages))
-	assert.Equal(32, len(enSite.AllPages))
+	require.Equal(t, "en", enSite.language.Lang)
 
-	doc1en := enSite.RegularPages[0]
-	permalink := doc1en.Permalink()
-	require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
-	require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
+	assert.Equal(5, len(enSite.RegularPages()))
+	assert.Equal(32, len(enSite.AllPages()))
 
-	doc2 := enSite.RegularPages[1]
-	permalink = doc2.Permalink()
-	require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
+	// Check 404s
+	b.AssertFileContent("public/en/404.html", "404|en|404 Page not found")
+	b.AssertFileContent("public/fr/404.html", "404|fr|404 Page not found")
 
-	doc3 := enSite.RegularPages[2]
-	permalink = doc3.Permalink()
-	// Note that /superbob is a custom URL set in frontmatter.
-	// We respect that URL literally (it can be /search.json)
-	// and do no not do any language code prefixing.
-	require.Equal(t, "http://example.com/blog/superbob/", permalink, "invalid doc3 permalink")
+	// Check robots.txt
+	b.AssertFileContent("public/en/robots.txt", "robots|en|")
+	b.AssertFileContent("public/nn/robots.txt", "robots|nn|")
 
-	require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3")
-	b.AssertFileContent("public/superbob/index.html", "doc3|Hello|en")
-	require.Equal(t, doc2.PrevPage, doc3, "doc3 should follow doc2, in .PrevPage")
+	b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Permalink: http://example.com/blog/en/sect/doc1-slug/")
+	b.AssertFileContent("public/en/sect/doc2/index.html", "Permalink: http://example.com/blog/en/sect/doc2/")
+	b.AssertFileContent("public/superbob/index.html", "Permalink: http://example.com/blog/superbob/")
 
+	doc2 := enSite.RegularPages()[1]
+	doc3 := enSite.RegularPages()[2]
+	require.Equal(t, doc2.Prev(), doc3, "doc3 should follow doc2, in .PrevPage")
+	doc1en := enSite.RegularPages()[0]
 	doc1fr := doc1en.Translations()[0]
-	permalink = doc1fr.Permalink()
-	require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink")
+	b.AssertFileContent("public/fr/sect/doc1/index.html", "Permalink: http://example.com/blog/fr/sect/doc1/")
 
 	require.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation")
 	require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation")
 	require.Equal(t, "fr", doc1fr.Language().Lang)
 
-	doc4 := enSite.AllPages[4]
-	permalink = doc4.Permalink()
-	require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink")
-	require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL())
-
+	doc4 := enSite.AllPages()[4]
 	require.Len(t, doc4.Translations(), 0, "found translations for doc4")
 
-	doc5 := enSite.AllPages[5]
-	permalink = doc5.Permalink()
-	require.Equal(t, "http://example.com/blog/fr/somewhere/else/doc5/", permalink, "invalid doc5 permalink")
-
 	// Taxonomies and their URLs
 	require.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy")
 	tags := enSite.Taxonomies["tags"]
@@ -294,12 +283,13 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 
 	frSite := sites[1]
 
-	require.Equal(t, "fr", frSite.Language.Lang)
-	require.Len(t, frSite.RegularPages, 4, "should have 3 pages")
-	require.Len(t, frSite.AllPages, 32, "should have 32 total pages (including translations and nodes)")
+	require.Equal(t, "fr", frSite.language.Lang)
+	require.Len(t, frSite.RegularPages(), 4, "should have 4 pages")
+	require.Len(t, frSite.AllPages(), 32, "should have 32 total pages (including translations and nodes)")
 
-	for _, frenchPage := range frSite.RegularPages {
-		require.Equal(t, "fr", frenchPage.Lang())
+	for _, frenchPage := range frSite.RegularPages() {
+		p := frenchPage
+		require.Equal(t, "fr", p.Language().Lang)
 	}
 
 	// See https://github.com/gohugoio/hugo/issues/4285
@@ -307,10 +297,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 	// isn't ideal in a multilingual setup. You want a way to get the current language version if available.
 	// Now you can do lookups with translation base name to get that behaviour.
 	// Let us test all the regular page variants:
-	getPageDoc1En := enSite.getPage(KindPage, filepath.ToSlash(doc1en.Path()))
-	getPageDoc1EnBase := enSite.getPage(KindPage, "sect/doc1")
-	getPageDoc1Fr := frSite.getPage(KindPage, filepath.ToSlash(doc1fr.Path()))
-	getPageDoc1FrBase := frSite.getPage(KindPage, "sect/doc1")
+	getPageDoc1En := enSite.getPage(page.KindPage, filepath.ToSlash(doc1en.File().Path()))
+	getPageDoc1EnBase := enSite.getPage(page.KindPage, "sect/doc1")
+	getPageDoc1Fr := frSite.getPage(page.KindPage, filepath.ToSlash(doc1fr.File().Path()))
+	getPageDoc1FrBase := frSite.getPage(page.KindPage, "sect/doc1")
 	require.Equal(t, doc1en, getPageDoc1En)
 	require.Equal(t, doc1fr, getPageDoc1Fr)
 	require.Equal(t, doc1en, getPageDoc1EnBase)
@@ -328,35 +318,36 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 	b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault")
 
 	// Check node translations
-	homeEn := enSite.getPage(KindHome)
+	homeEn := enSite.getPage(page.KindHome)
 	require.NotNil(t, homeEn)
 	require.Len(t, homeEn.Translations(), 3)
-	require.Equal(t, "fr", homeEn.Translations()[0].Lang())
-	require.Equal(t, "nn", homeEn.Translations()[1].Lang())
-	require.Equal(t, "På nynorsk", homeEn.Translations()[1].title)
-	require.Equal(t, "nb", homeEn.Translations()[2].Lang())
-	require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix)
+	require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
+	require.Equal(t, "nn", homeEn.Translations()[1].Language().Lang)
+	require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title())
+	require.Equal(t, "nb", homeEn.Translations()[2].Language().Lang)
+	require.Equal(t, "På bokmål", homeEn.Translations()[2].Title(), configSuffix)
 	require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)
 
-	sectFr := frSite.getPage(KindSection, "sect")
+	sectFr := frSite.getPage(page.KindSection, "sect")
 	require.NotNil(t, sectFr)
 
-	require.Equal(t, "fr", sectFr.Lang())
+	require.Equal(t, "fr", sectFr.Language().Lang)
 	require.Len(t, sectFr.Translations(), 1)
-	require.Equal(t, "en", sectFr.Translations()[0].Lang())
-	require.Equal(t, "Sects", sectFr.Translations()[0].title)
+	require.Equal(t, "en", sectFr.Translations()[0].Language().Lang)
+	require.Equal(t, "Sects", sectFr.Translations()[0].Title())
 
 	nnSite := sites[2]
-	require.Equal(t, "nn", nnSite.Language.Lang)
-	taxNn := nnSite.getPage(KindTaxonomyTerm, "lag")
+	require.Equal(t, "nn", nnSite.language.Lang)
+	taxNn := nnSite.getPage(page.KindTaxonomyTerm, "lag")
 	require.NotNil(t, taxNn)
 	require.Len(t, taxNn.Translations(), 1)
-	require.Equal(t, "nb", taxNn.Translations()[0].Lang())
+	require.Equal(t, "nb", taxNn.Translations()[0].Language().Lang)
 
-	taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal")
+	taxTermNn := nnSite.getPage(page.KindTaxonomy, "lag", "sogndal")
 	require.NotNil(t, taxTermNn)
+	require.Equal(t, taxTermNn, nnSite.getPage(page.KindTaxonomy, "LAG", "SOGNDAL"))
 	require.Len(t, taxTermNn.Translations(), 1)
-	require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
+	require.Equal(t, "nb", taxTermNn.Translations()[0].Language().Lang)
 
 	// Check sitemap(s)
 	b.AssertFileContent("public/sitemap.xml",
@@ -371,59 +362,53 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 	require.Len(t, enTags, 2, fmt.Sprintf("Tags in en: %v", enTags))
 	require.Len(t, frTags, 2, fmt.Sprintf("Tags in fr: %v", frTags))
 	require.NotNil(t, enTags["tag1"])
-	require.NotNil(t, frTags["frtag1"])
-	b.AssertFileContent("public/fr/plaques/frtag1/index.html", "Frtag1|Bonjour|http://example.com/blog/fr/plaques/frtag1/")
-	b.AssertFileContent("public/en/tags/tag1/index.html", "Tag1|Hello|http://example.com/blog/en/tags/tag1/")
+	require.NotNil(t, frTags["FRtag1"])
+	b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
+	b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")
 
 	// Check Blackfriday config
-	require.True(t, strings.Contains(string(doc1fr.content()), "&laquo;"), string(doc1fr.content()))
-	require.False(t, strings.Contains(string(doc1en.content()), "&laquo;"), string(doc1en.content()))
-	require.True(t, strings.Contains(string(doc1en.content()), "&ldquo;"), string(doc1en.content()))
-
-	// Check that the drafts etc. are not built/processed/rendered.
-	assertShouldNotBuild(t, b.H)
+	require.True(t, strings.Contains(content(doc1fr), "&laquo;"), content(doc1fr))
+	require.False(t, strings.Contains(content(doc1en), "&laquo;"), content(doc1en))
+	require.True(t, strings.Contains(content(doc1en), "&ldquo;"), content(doc1en))
 
 	// en and nn have custom site menus
-	require.Len(t, frSite.Menus, 0, "fr: "+configSuffix)
-	require.Len(t, enSite.Menus, 1, "en: "+configSuffix)
-	require.Len(t, nnSite.Menus, 1, "nn: "+configSuffix)
+	require.Len(t, frSite.Menus(), 0, "fr: "+configSuffix)
+	require.Len(t, enSite.Menus(), 1, "en: "+configSuffix)
+	require.Len(t, nnSite.Menus(), 1, "nn: "+configSuffix)
 
-	require.Equal(t, "Home", enSite.Menus["main"].ByName()[0].Name)
-	require.Equal(t, "Heim", nnSite.Menus["main"].ByName()[0].Name)
-
-	// Issue #1302
-	require.Equal(t, template.URL(""), enSite.RegularPages[0].RSSLink())
+	require.Equal(t, "Home", enSite.Menus()["main"].ByName()[0].Name)
+	require.Equal(t, "Heim", nnSite.Menus()["main"].ByName()[0].Name)
 
 	// Issue #3108
-	prevPage := enSite.RegularPages[0].PrevPage
+	prevPage := enSite.RegularPages()[0].Prev()
 	require.NotNil(t, prevPage)
-	require.Equal(t, KindPage, prevPage.Kind)
+	require.Equal(t, page.KindPage, prevPage.Kind())
 
 	for {
 		if prevPage == nil {
 			break
 		}
-		require.Equal(t, KindPage, prevPage.Kind)
-		prevPage = prevPage.PrevPage
+		require.Equal(t, page.KindPage, prevPage.Kind())
+		prevPage = prevPage.Prev()
 	}
 
 	// Check bundles
-	bundleFr := frSite.getPage(KindPage, "bundles/b1/index.md")
+	b.AssertFileContent("public/fr/bundles/b1/index.html", "RelPermalink: /blog/fr/bundles/b1/|")
+	bundleFr := frSite.getPage(page.KindPage, "bundles/b1/index.md")
 	require.NotNil(t, bundleFr)
-	require.Equal(t, "/blog/fr/bundles/b1/", bundleFr.RelPermalink())
-	require.Equal(t, 1, len(bundleFr.Resources))
-	logoFr := bundleFr.Resources.GetMatch("logo*")
+	require.Equal(t, 1, len(bundleFr.Resources()))
+	logoFr := bundleFr.Resources().GetMatch("logo*")
 	require.NotNil(t, logoFr)
-	require.Equal(t, "/blog/fr/bundles/b1/logo.png", logoFr.RelPermalink())
+	b.AssertFileContent("public/fr/bundles/b1/index.html", "Resources: image/png: /blog/fr/bundles/b1/logo.png")
 	b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
 
-	bundleEn := enSite.getPage(KindPage, "bundles/b1/index.en.md")
+	bundleEn := enSite.getPage(page.KindPage, "bundles/b1/index.en.md")
 	require.NotNil(t, bundleEn)
-	require.Equal(t, "/blog/en/bundles/b1/", bundleEn.RelPermalink())
-	require.Equal(t, 1, len(bundleEn.Resources))
-	logoEn := bundleEn.Resources.GetMatch("logo*")
+	b.AssertFileContent("public/en/bundles/b1/index.html", "RelPermalink: /blog/en/bundles/b1/|")
+	require.Equal(t, 1, len(bundleEn.Resources()))
+	logoEn := bundleEn.Resources().GetMatch("logo*")
 	require.NotNil(t, logoEn)
-	require.Equal(t, "/blog/en/bundles/b1/logo.png", logoEn.RelPermalink())
+	b.AssertFileContent("public/en/bundles/b1/index.html", "Resources: image/png: /blog/en/bundles/b1/logo.png")
 	b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
 
 }
@@ -442,13 +427,13 @@ func TestMultiSitesRebuild(t *testing.T) {
 	sites := b.H.Sites
 	fs := b.Fs
 
-	b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
+	b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
 
 	enSite := sites[0]
 	frSite := sites[1]
 
-	assert.Len(enSite.RegularPages, 5)
-	assert.Len(frSite.RegularPages, 4)
+	assert.Len(enSite.RegularPages(), 5)
+	assert.Len(frSite.RegularPages(), 4)
 
 	// Verify translations
 	b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello")
@@ -458,6 +443,10 @@ func TestMultiSitesRebuild(t *testing.T) {
 	b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
 	b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
 
+	homeEn := enSite.getPage(page.KindHome)
+	require.NotNil(t, homeEn)
+	assert.Len(homeEn.Translations(), 3)
+
 	contentFs := b.H.BaseFs.Content.Fs
 
 	for i, this := range []struct {
@@ -478,15 +467,15 @@ func TestMultiSitesRebuild(t *testing.T) {
 			},
 			[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 4, "1 en removed")
+				assert.Len(enSite.RegularPages(), 4, "1 en removed")
 
 				// Check build stats
-				require.Equal(t, 1, enSite.draftCount, "Draft")
-				require.Equal(t, 1, enSite.futureCount, "Future")
-				require.Equal(t, 1, enSite.expiredCount, "Expired")
-				require.Equal(t, 0, frSite.draftCount, "Draft")
-				require.Equal(t, 1, frSite.futureCount, "Future")
-				require.Equal(t, 1, frSite.expiredCount, "Expired")
+				require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
+				require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
+				require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
+				require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
+				require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
+				require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
 			},
 		},
 		{
@@ -501,12 +490,12 @@ func TestMultiSitesRebuild(t *testing.T) {
 				{Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
 			},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6)
-				assert.Len(enSite.AllPages, 34)
-				assert.Len(frSite.RegularPages, 5)
-				require.Equal(t, "new_fr_1", frSite.RegularPages[3].title)
-				require.Equal(t, "new_en_2", enSite.RegularPages[0].title)
-				require.Equal(t, "new_en_1", enSite.RegularPages[1].title)
+				assert.Len(enSite.RegularPages(), 6)
+				assert.Len(enSite.AllPages(), 34)
+				assert.Len(frSite.RegularPages(), 5)
+				require.Equal(t, "new_fr_1", frSite.RegularPages()[3].Title())
+				require.Equal(t, "new_en_2", enSite.RegularPages()[0].Title())
+				require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
 
 				rendered := readDestination(t, fs, "public/en/new1/index.html")
 				require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@@ -521,7 +510,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 			},
 			[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6)
+				assert.Len(enSite.RegularPages(), 6)
 				doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
 
@@ -539,8 +528,8 @@ func TestMultiSitesRebuild(t *testing.T) {
 				{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
 			},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6, "Rename")
-				require.Equal(t, "new_en_1", enSite.RegularPages[1].title)
+				assert.Len(enSite.RegularPages(), 6, "Rename")
+				require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
 				rendered := readDestination(t, fs, "public/en/new1renamed/index.html")
 				require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
 			}},
@@ -554,9 +543,9 @@ func TestMultiSitesRebuild(t *testing.T) {
 			},
 			[]fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6)
-				assert.Len(enSite.AllPages, 34)
-				assert.Len(frSite.RegularPages, 5)
+				assert.Len(enSite.RegularPages(), 6)
+				assert.Len(enSite.AllPages(), 34)
+				assert.Len(frSite.RegularPages(), 5)
 				doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
 			},
@@ -571,18 +560,18 @@ func TestMultiSitesRebuild(t *testing.T) {
 			},
 			[]fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6)
-				assert.Len(enSite.AllPages, 34)
-				assert.Len(frSite.RegularPages, 5)
+				assert.Len(enSite.RegularPages(), 6)
+				assert.Len(enSite.AllPages(), 34)
+				assert.Len(frSite.RegularPages(), 5)
 				docEn := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
 				docFr := readDestination(t, fs, "public/fr/sect/doc1/index.html")
 				require.True(t, strings.Contains(docFr, "Salut"), "No Salut")
 
-				homeEn := enSite.getPage(KindHome)
+				homeEn := enSite.getPage(page.KindHome)
 				require.NotNil(t, homeEn)
 				assert.Len(homeEn.Translations(), 3)
-				require.Equal(t, "fr", homeEn.Translations()[0].Lang())
+				require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
 
 			},
 		},
@@ -595,9 +584,9 @@ func TestMultiSitesRebuild(t *testing.T) {
 				{Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
 			},
 			func(t *testing.T) {
-				assert.Len(enSite.RegularPages, 6)
-				assert.Len(enSite.AllPages, 34)
-				assert.Len(frSite.RegularPages, 5)
+				assert.Len(enSite.RegularPages(), 6)
+				assert.Len(enSite.AllPages(), 34)
+				assert.Len(frSite.RegularPages(), 5)
 				b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut")
 				b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello")
 			},
@@ -617,23 +606,6 @@ func TestMultiSitesRebuild(t *testing.T) {
 		this.assertFunc(t)
 	}
 
-	// Check that the drafts etc. are not built/processed/rendered.
-	assertShouldNotBuild(t, b.H)
-
-}
-
-func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
-	s := sites.Sites[0]
-
-	for _, p := range s.rawAllPages {
-		// No HTML when not processed
-		require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
-
-		require.Equal(t, p.shouldBuild(), p.content() != "", fmt.Sprintf("%v:%v", p.content(), p.shouldBuild()))
-
-		require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName())
-
-	}
 }
 
 func TestAddNewLanguage(t *testing.T) {
@@ -671,31 +643,32 @@ title = "Svenska"
 	enSite := sites.Sites[0]
 	svSite := sites.Sites[1]
 	frSite := sites.Sites[2]
-	require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang)
-	require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
-	require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
+	require.True(t, enSite.language.Lang == "en", enSite.language.Lang)
+	require.True(t, svSite.language.Lang == "sv", svSite.language.Lang)
+	require.True(t, frSite.language.Lang == "fr", frSite.language.Lang)
 
-	homeEn := enSite.getPage(KindHome)
+	homeEn := enSite.getPage(page.KindHome)
 	require.NotNil(t, homeEn)
 	require.Len(t, homeEn.Translations(), 4)
-	require.Equal(t, "sv", homeEn.Translations()[0].Lang())
 
-	require.Len(t, enSite.RegularPages, 5)
-	require.Len(t, frSite.RegularPages, 4)
+	require.Equal(t, "sv", homeEn.Translations()[0].Language().Lang)
+
+	require.Len(t, enSite.RegularPages(), 5)
+	require.Len(t, frSite.RegularPages(), 4)
 
 	// Veriy Swedish site
-	require.Len(t, svSite.RegularPages, 1)
-	svPage := svSite.RegularPages[0]
+	require.Len(t, svSite.RegularPages(), 1)
+	svPage := svSite.RegularPages()[0]
 
-	require.Equal(t, "Swedish Contentfile", svPage.title)
-	require.Equal(t, "sv", svPage.Lang())
+	require.Equal(t, "Swedish Contentfile", svPage.Title())
+	require.Equal(t, "sv", svPage.Language().Lang)
 	require.Len(t, svPage.Translations(), 2)
 	require.Len(t, svPage.AllTranslations(), 3)
-	require.Equal(t, "en", svPage.Translations()[0].Lang())
+	require.Equal(t, "en", svPage.Translations()[0].Language().Lang)
 
 	// Regular pages have no children
-	require.Len(t, svPage.Pages, 0)
-	require.Len(t, svPage.data["Pages"], 0)
+	require.Len(t, svPage.Pages(), 0)
+	require.Len(t, svPage.Data().(page.Data).Pages(), 0)
 
 }
 
@@ -782,12 +755,12 @@ Some text. Some more text.
 	content = append(content, []string{"s2/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 2), 2, true)}...)
 
 	b.WithSimpleConfigFile()
-	b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}`)
+	b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
 	b.WithTemplates("layouts/_default/myview.html", `View: {{ len .Content }}`)
-	b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}`)
+	b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
 	b.WithTemplates("layouts/_default/list.html", `
 Page: {{ .Paginator.PageNumber }}
-P: {{ path.Join .Path }}
+P: {{ with .File }}{{ path.Join .Path }}{{ end }}
 List: {{ len .Paginator.Pages }}|List Content: {{ len .Content }}
 {{ $shuffled :=  where .Site.RegularPages "Params.multioutput" true | shuffle }}
 {{ $first5 := $shuffled | first 5 }}
@@ -810,7 +783,7 @@ END
 			if i%10 == 0 {
 				section = "s2"
 			}
-			checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), 8343, contentMatchers...)
+			checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), contentMatchers...)
 		}
 	}
 
@@ -819,29 +792,27 @@ END
 		if i%10 == 0 {
 			section = "s2"
 		}
-		checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), 8348, contentMatchers...)
+		checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), contentMatchers...)
 	}
 
-	checkContent(b, "public/s1/index.html", 184, "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n")
-	checkContent(b, "public/s2/index.html", 184, "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND")
-	checkContent(b, "public/index.html", 181, "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND")
+	checkContent(b, "public/s1/index.html", "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n")
+	checkContent(b, "public/s2/index.html", "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND")
+	checkContent(b, "public/index.html", "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND")
 
-	// Chek paginated pages
+	// Check paginated pages
 	for i := 2; i <= 9; i++ {
-		checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), 181, fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND")
+		checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND")
 	}
 }
 
-func checkContent(s *sitesBuilder, filename string, length int, matches ...string) {
+func checkContent(s *sitesBuilder, filename string, matches ...string) {
 	content := readDestination(s.T, s.Fs, filename)
 	for _, match := range matches {
 		if !strings.Contains(content, match) {
 			s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
 		}
 	}
-	if len(content) != length {
-		s.Fatalf("got %d expected %d", len(content), length)
-	}
+
 }
 
 func TestTableOfContentsInShortcodes(t *testing.T) {
@@ -850,17 +821,36 @@ func TestTableOfContentsInShortcodes(t *testing.T) {
 	b := newMultiSiteTestDefaultBuilder(t)
 
 	b.WithTemplatesAdded("layouts/shortcodes/toc.html", tocShortcode)
+	b.WithTemplatesAdded("layouts/shortcodes/wrapper.html", "{{ .Inner }}")
 	b.WithContent("post/simple.en.md", tocPageSimple)
+	b.WithContent("post/variants1.en.md", tocPageVariants1)
+	b.WithContent("post/variants2.en.md", tocPageVariants2)
+
 	b.WithContent("post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings)
 
 	b.CreateSites().Build(BuildCfg{})
 
-	b.AssertFileContent("public/en/post/simple/index.html", tocPageSimpleExpected)
+	b.AssertFileContent("public/en/post/simple/index.html",
+		tocPageSimpleExpected,
+		// Make sure it is inserted twice
+		`TOC1: <nav id="TableOfContents">`,
+		`TOC2: <nav id="TableOfContents">`,
+	)
+
+	b.AssertFileContentFn("public/en/post/variants1/index.html", func(s string) bool {
+		return strings.Count(s, "TableOfContents") == 4
+	})
+	b.AssertFileContentFn("public/en/post/variants2/index.html", func(s string) bool {
+		return strings.Count(s, "TableOfContents") == 6
+	})
+
 	b.AssertFileContent("public/en/post/withSCInHeading/index.html", tocPageWithShortcodesInHeadingsExpected)
 }
 
 var tocShortcode = `
-{{ .Page.TableOfContents }}
+TOC1: {{ .Page.TableOfContents }}
+
+TOC2: {{ .Page.TableOfContents }}
 `
 
 func TestSelfReferencedContentInShortcode(t *testing.T) {
@@ -901,6 +891,41 @@ Even more text.
 Lorem ipsum...
 `
 
+var tocPageVariants1 = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+Variant 1:
+{{% wrapper %}}
+{{< toc >}}
+{{% /wrapper %}}
+# Heading 1
+
+Variant 3:
+{{% toc %}}
+
+`
+
+var tocPageVariants2 = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+Variant 1:
+{{% wrapper %}}
+{{< toc >}}
+{{% /wrapper %}}
+# Heading 1
+
+Variant 2:
+{{< wrapper >}}
+{{< toc >}}
+{{< /wrapper >}}
+
+Variant 3:
+{{% toc %}}
+
+`
+
 var tocPageSimpleExpected = `<nav id="TableOfContents">
 <ul>
 <li><a href="#1">Heading 1</a>
@@ -958,6 +983,7 @@ paginate = 1
 disablePathToLower = true
 defaultContentLanguage = "{{ .DefaultContentLanguage }}"
 defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }}
+enableRobotsTXT = true
 
 [permalinks]
 other = "/somewhere/else/:filename"
@@ -1015,6 +1041,7 @@ disablePathToLower: true
 paginate: 1
 defaultContentLanguage: "{{ .DefaultContentLanguage }}"
 defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }}
+enableRobotsTXT: true
 
 permalinks:
     other: "/somewhere/else/:filename"
@@ -1073,6 +1100,7 @@ var multiSiteJSONConfigTemplate = `
   "disablePathToLower": true,
   "defaultContentLanguage": "{{ .DefaultContentLanguage }}",
   "defaultContentLanguageInSubdir": true,
+  "enableRobotsTXT": true,
   "permalinks": {
     "other": "/somewhere/else/:filename"
   },
@@ -1170,7 +1198,23 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
 	b, err := afero.ReadFile(fs, filename)
 	if err != nil {
 		// Print some debug info
-		root := strings.Split(filename, helpers.FilePathSeparator)[0]
+		hadSlash := strings.HasPrefix(filename, helpers.FilePathSeparator)
+		start := 0
+		if hadSlash {
+			start = 1
+		}
+		end := start + 1
+
+		parts := strings.Split(filename, helpers.FilePathSeparator)
+		if parts[start] == "work" {
+			end++
+		}
+
+		root := filepath.Join(parts[start:end]...)
+		if hadSlash {
+			root = helpers.FilePathSeparator + root
+		}
+
 		helpers.PrintFs(fs, root, os.Stdout)
 		Fatalf(t, "Failed to read file: %s", err)
 	}
@@ -1262,8 +1306,8 @@ NOTE: slug should be used as URL
 title: doc1
 weight: 1
 plaques:
- - frtag1
- - frtag2
+ - FRtag1
+ - FRtag2
 publishdate: "2000-01-04"
 ---
 # doc1
@@ -1293,7 +1337,7 @@ aliases: [/en/al/alias1,/al/alias2/]
 tags:
  - tag2
  - tag1
-url: /superbob
+url: /superbob/
 ---
 # doc3
 *some content*
@@ -1303,7 +1347,7 @@ NOTE: third 'en' doc, should trigger pagination on home page.
 title: doc4
 weight: 4
 plaques:
- - frtag1
+ - FRtag1
 publishdate: "2000-01-05"
 ---
 # doc4
diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go
index 83d6bfc9e9a..999d945594c 100644
--- a/hugolib/hugo_sites_multihost_test.go
+++ b/hugolib/hugo_sites_multihost_test.go
@@ -3,6 +3,8 @@ package hugolib
 import (
 	"testing"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/stretchr/testify/require"
 )
 
@@ -55,7 +57,7 @@ languageName = "Nynorsk"
 
 	s1 := b.H.Sites[0]
 
-	s1h := s1.getPage(KindHome)
+	s1h := s1.getPage(page.KindHome)
 	assert.True(s1h.IsTranslated())
 	assert.Len(s1h.Translations(), 2)
 	assert.Equal("https://example.com/docs/", s1h.Permalink())
@@ -66,9 +68,8 @@ languageName = "Nynorsk"
 	// For multihost, we never want any content in the root.
 	//
 	// check url in front matter:
-	pageWithURLInFrontMatter := s1.getPage(KindPage, "sect/doc3.en.md")
+	pageWithURLInFrontMatter := s1.getPage(page.KindPage, "sect/doc3.en.md")
 	assert.NotNil(pageWithURLInFrontMatter)
-	assert.Equal("/superbob", pageWithURLInFrontMatter.URL())
 	assert.Equal("/docs/superbob/", pageWithURLInFrontMatter.RelPermalink())
 	b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en")
 
@@ -78,7 +79,7 @@ languageName = "Nynorsk"
 
 	s2 := b.H.Sites[1]
 
-	s2h := s2.getPage(KindHome)
+	s2h := s2.getPage(page.KindHome)
 	assert.Equal("https://example.fr/", s2h.Permalink())
 
 	b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt")
@@ -94,22 +95,19 @@ languageName = "Nynorsk"
 
 	// Check bundles
 
-	bundleEn := s1.getPage(KindPage, "bundles/b1/index.en.md")
+	bundleEn := s1.getPage(page.KindPage, "bundles/b1/index.en.md")
 	require.NotNil(t, bundleEn)
 	require.Equal(t, "/docs/bundles/b1/", bundleEn.RelPermalink())
-	require.Equal(t, 1, len(bundleEn.Resources))
-	logoEn := bundleEn.Resources.GetMatch("logo*")
-	require.NotNil(t, logoEn)
-	require.Equal(t, "/docs/bundles/b1/logo.png", logoEn.RelPermalink())
+	require.Equal(t, 1, len(bundleEn.Resources()))
+
 	b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
+	b.AssertFileContent("public/en/bundles/b1/index.html", " image/png: /docs/bundles/b1/logo.png")
 
-	bundleFr := s2.getPage(KindPage, "bundles/b1/index.md")
+	bundleFr := s2.getPage(page.KindPage, "bundles/b1/index.md")
 	require.NotNil(t, bundleFr)
 	require.Equal(t, "/bundles/b1/", bundleFr.RelPermalink())
-	require.Equal(t, 1, len(bundleFr.Resources))
-	logoFr := bundleFr.Resources.GetMatch("logo*")
-	require.NotNil(t, logoFr)
-	require.Equal(t, "/bundles/b1/logo.png", logoFr.RelPermalink())
+	require.Equal(t, 1, len(bundleFr.Resources()))
 	b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
+	b.AssertFileContent("public/fr/bundles/b1/index.html", " image/png: /bundles/b1/logo.png")
 
 }
diff --git a/hugolib/hugo_smoke_test.go b/hugolib/hugo_smoke_test.go
new file mode 100644
index 00000000000..d5b8861ce6b
--- /dev/null
+++ b/hugolib/hugo_smoke_test.go
@@ -0,0 +1,303 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestSmoke(t *testing.T) {
+	t.Parallel()
+
+	assert := require.New(t)
+
+	const configFile = `
+baseURL = "https://example.com"
+title = "Simple Site"
+rssLimit = 3
+defaultContentLanguage = "en"
+enableRobotsTXT = true
+
+[languages]
+[languages.en]
+weight = 1
+title = "In English"
+[languages.no]
+weight = 2
+title = "På norsk"
+
+[params]
+hugo = "Rules!"
+
+[outputs]
+  home = ["HTML", "JSON", "CSV", "RSS"]
+
+`
+
+	const pageContentAndSummaryDivider = `---
+title: Page with outputs
+hugo: "Rocks!"
+outputs: ["HTML", "JSON"]
+tags: [ "hugo" ]
+aliases: [ "/a/b/c" ]
+---
+
+This is summary.
+
+<!--more--> 
+
+This is content with some shortcodes.
+
+Shortcode 1: {{< sc >}}.
+Shortcode 2: {{< sc >}}.
+
+`
+
+	const pageContentWithMarkdownShortcodes = `---
+title: Page with markdown shortcode
+hugo: "Rocks!"
+outputs: ["HTML", "JSON"]
+---
+
+This is summary.
+
+<!--more--> 
+
+This is content[^a].
+
+# Header above
+
+{{% markdown-shortcode %}}
+# Header inside
+
+Some **markdown**.[^b]
+
+{{% /markdown-shortcode %}}
+
+# Header below
+
+Some more content[^c].
+
+Footnotes:
+
+[^a]: Fn 1
+[^b]: Fn 2
+[^c]: Fn 3
+
+`
+
+	var pageContentAutoSummary = strings.Replace(pageContentAndSummaryDivider, "<!--more-->", "", 1)
+
+	b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
+	b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", `
+Some **Markdown** in shortcode.
+
+{{ .Inner }}
+
+
+	
+`)
+
+	b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", `
+Some **Markdown** in JSON shortcode.
+{{ .Inner }}
+
+`)
+
+	for i := 1; i <= 11; i++ {
+		if i%2 == 0 {
+			b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider)
+			b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider)
+		} else {
+			b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary)
+		}
+	}
+
+	for i := 1; i <= 5; i++ {
+		// Root section pages
+		b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary)
+	}
+
+	// https://github.com/gohugoio/hugo/issues/4695
+	b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes)
+
+	// Add one bundle
+	b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider)
+	b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV")
+
+	const (
+		commonPageTemplate            = `|{{ .Kind }}|{{ .Title }}|{{ .Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
+		commonPaginatorTemplate       = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
+		commonListTemplateNoPaginator = `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+		commonListTemplate            = commonPaginatorTemplate + `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+		commonShortcodeTemplate       = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
+		prevNextTemplate              = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
+		prevNextInSectionTemplate     = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`
+		paramsTemplate                = `|Params: {{ .Params.hugo }}`
+		treeNavTemplate               = `|CurrentSection: {{ .CurrentSection }}`
+	)
+
+	b.WithTemplates(
+		"_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}",
+		"_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator,
+		"_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator,
+		"_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate,
+		"_default/single.json", "JSON: Single"+commonPageTemplate,
+
+		// For .Render test
+		"_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate,
+		"_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate,
+		"_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate,
+
+		"404.html", "{{ .Kind }}|{{ .Title }}|Page not found",
+
+		"shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate,
+		"shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate,
+		"shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate,
+	)
+
+	b.CreateSites().Build(BuildCfg{})
+
+	b.AssertFileContent("public/blog/page1/index.html",
+		"This is content with some shortcodes.",
+		"Page with outputs",
+		"Pages: Pages(0)",
+		"RelPermalink: /blog/page1/|",
+		"Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.",
+		"Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.",
+		"Prev: /blog/page10/|Next: /blog/mybundle/",
+		"PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/",
+		"Summary: This is summary.",
+		"CurrentSection: Page(/blog)",
+	)
+
+	b.AssertFileContent("public/blog/page1/index.json",
+		"JSON: Single|page|Page with outputs|",
+		"SON: Shortcode: |sc|0||")
+
+	b.AssertFileContent("public/index.html",
+		"home|In English",
+		"Site params: Rules",
+		"Pages: Pages(18)|Data Pages: Pages(18)",
+		"Paginator: 1",
+		"First Site: In English",
+		"RelPermalink: /",
+	)
+
+	b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/")
+
+	// Check RSS
+	rssHome := b.FileContent("public/index.xml")
+	assert.Contains(rssHome, `<atom:link href="https://example.com/index.xml" rel="self" type="application/rss+xml" />`)
+	assert.Equal(3, strings.Count(rssHome, "<item>")) // rssLimit = 3
+
+	// .Render should use template/content from the current output format
+	// even if that output format isn't configured for that page.
+	b.AssertFileContent(
+		"public/index.json",
+		"Render 0: page|JSON: LI|false|Params: Rocks!",
+	)
+
+	b.AssertFileContent(
+		"public/index.html",
+		"Render 0: page|HTML: LI|false|Params: Rocks!|",
+	)
+
+	b.AssertFileContent(
+		"public/index.csv",
+		"Render 0: page|CSV: LI|false|Params: Rocks!|",
+	)
+
+	// Check bundled resources
+	b.AssertFileContent(
+		"public/blog/mybundle/index.html",
+		"Resources: 1",
+	)
+
+	// Check pages in root section
+	b.AssertFileContent(
+		"public/root3/index.html",
+		"Single|page|Page with outputs|root3.md|",
+		"Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/",
+	)
+
+	b.AssertFileContent(
+		"public/root3/index.json", "Shortcode 1: JSON:")
+
+	// Paginators
+	b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`)
+	b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2")
+
+	// 404
+	b.AssertFileContent("public/404.html", "404|404 Page not found")
+
+	// Sitemaps
+	b.AssertFileContent("public/en/sitemap.xml", "<loc>https://example.com/blog/</loc>")
+	b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`)
+
+	b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/no/sitemap.xml</loc>")
+
+	// robots.txt
+	b.AssertFileContent("public/robots.txt", `User-agent: *`)
+
+	// Aliases
+	b.AssertFileContent("public/a/b/c/index.html", `refresh`)
+
+	// Markdown vs shortcodes
+	// Check that all footnotes are grouped (even those from inside the shortcode)
+	b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>`)
+
+}
+
+// https://github.com/golang/go/issues/30286
+func TestDataRace(t *testing.T) {
+
+	const page = `
+---
+title: "The Page"
+outputs: ["HTML", "JSON"]
+---	
+
+The content.
+	
+
+	`
+
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+	for i := 1; i <= 50; i++ {
+		b.WithContent(fmt.Sprintf("blog/page%d.md", i), page)
+	}
+
+	b.WithContent("_index.md", `
+---
+title: "The Home"
+outputs: ["HTML", "JSON", "CSV", "RSS"]
+---	
+
+The content.
+	
+
+`)
+
+	commonTemplate := `{{ .Data.Pages }}`
+
+	b.WithTemplatesAdded("_default/single.html", "HTML Single: "+commonTemplate)
+	b.WithTemplatesAdded("_default/list.html", "HTML List: "+commonTemplate)
+
+	b.CreateSites().Build(BuildCfg{})
+}
diff --git a/hugolib/language_content_dir_test.go b/hugolib/language_content_dir_test.go
index 577fdfaeb77..ad1e1fb5345 100644
--- a/hugolib/language_content_dir_test.go
+++ b/hugolib/language_content_dir_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,6 +19,8 @@ import (
 	"path/filepath"
 	"testing"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/stretchr/testify/require"
 )
 
@@ -99,15 +101,19 @@ Content.
 	section := "sect"
 
 	var contentRoot = func(lang string) string {
-		contentRoot := "content/main"
-
 		switch lang {
 		case "nn":
-			contentRoot = "content/norsk"
+			return "content/norsk"
 		case "sv":
-			contentRoot = "content/svensk"
+			return "content/svensk"
+		default:
+			return "content/main"
 		}
-		return contentRoot + "/" + section
+
+	}
+
+	var contentSectionRoot = func(lang string) string {
+		return contentRoot(lang) + "/" + section
 	}
 
 	for _, lang := range []string{"en", "nn", "sv"} {
@@ -124,7 +130,7 @@ Content.
 			}
 
 			base := fmt.Sprintf("p-%s-%d", lang, j)
-			slug := fmt.Sprintf("%s", base)
+			slug := base
 			langID := ""
 
 			if lang == "sv" && j%4 == 0 {
@@ -139,7 +145,7 @@ Content.
 
 			slug += langID
 
-			contentRoot := contentRoot(lang)
+			contentRoot := contentSectionRoot(lang)
 
 			filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID))
 			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j))
@@ -148,7 +154,7 @@ Content.
 
 	// Put common translations in all of them
 	for i, lang := range []string{"en", "nn", "sv"} {
-		contentRoot := contentRoot(lang)
+		contentRoot := contentSectionRoot(lang)
 
 		slug := fmt.Sprintf("common_%s", lang)
 
@@ -173,7 +179,7 @@ Content.
 
 	// Add a bundle with some images
 	for i, lang := range []string{"en", "nn", "sv"} {
-		contentRoot := contentRoot(lang)
+		contentRoot := contentSectionRoot(lang)
 		slug := fmt.Sprintf("bundle_%s", lang)
 		filename := filepath.Join(contentRoot, "mybundle", "index.md")
 		contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i))
@@ -190,11 +196,20 @@ Content.
 
 	}
 
+	// Add some static files inside the content dir
+	// https://github.com/gohugoio/hugo/issues/5759
+	for _, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentRoot(lang)
+		for i := 0; i < 2; i++ {
+			filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i))
+			contentFiles = append(contentFiles, filename, lang)
+		}
+	}
+
 	b := newTestSitesBuilder(t)
 	b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites()
 
 	_ = os.Stdout
-	//printFs(b.H.BaseFs.ContentFs, "/", os.Stdout)
 
 	b.Build(BuildCfg{})
 
@@ -204,11 +219,14 @@ Content.
 	nnSite := b.H.Sites[1]
 	svSite := b.H.Sites[2]
 
+	b.AssertFileContent("/my/project/public/en/mystatic/file1.yaml", "en")
+	b.AssertFileContent("/my/project/public/nn/mystatic/file1.yaml", "nn")
+
 	//dumpPages(nnSite.RegularPages...)
-	assert.Equal(12, len(nnSite.RegularPages))
-	assert.Equal(13, len(enSite.RegularPages))
+	assert.Equal(12, len(nnSite.RegularPages()))
+	assert.Equal(13, len(enSite.RegularPages()))
 
-	assert.Equal(10, len(svSite.RegularPages))
+	assert.Equal(10, len(svSite.RegularPages()))
 
 	svP2, err := svSite.getPageNew(nil, "/sect/page2.md")
 	assert.NoError(err)
@@ -217,9 +235,9 @@ Content.
 
 	enP2, err := enSite.getPageNew(nil, "/sect/page2.md")
 	assert.NoError(err)
-	assert.Equal("en", enP2.Lang())
-	assert.Equal("sv", svP2.Lang())
-	assert.Equal("nn", nnP2.Lang())
+	assert.Equal("en", enP2.Language().Lang)
+	assert.Equal("sv", svP2.Language().Lang)
+	assert.Equal("nn", nnP2.Language().Lang)
 
 	content, _ := nnP2.Content()
 	assert.Contains(content, "SVP3-REF: https://example.org/sv/sect/p-sv-3/")
@@ -241,10 +259,10 @@ Content.
 	assert.NoError(err)
 	assert.Equal("https://example.org/nn/sect/p-nn-3/", nnP3Ref)
 
-	for i, p := range enSite.RegularPages {
+	for i, p := range enSite.RegularPages() {
 		j := i + 1
 		msg := fmt.Sprintf("Test %d", j)
-		assert.Equal("en", p.Lang(), msg)
+		assert.Equal("en", p.Language().Lang, msg)
 		assert.Equal("sect", p.Section())
 		if j < 9 {
 			if j%4 == 0 {
@@ -256,20 +274,20 @@ Content.
 	}
 
 	// Check bundles
-	bundleEn := enSite.RegularPages[len(enSite.RegularPages)-1]
-	bundleNn := nnSite.RegularPages[len(nnSite.RegularPages)-1]
-	bundleSv := svSite.RegularPages[len(svSite.RegularPages)-1]
+	bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1]
+	bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1]
+	bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1]
 
 	assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink())
 	assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink())
 
-	assert.Equal(4, len(bundleEn.Resources))
-	assert.Equal(4, len(bundleNn.Resources))
-	assert.Equal(4, len(bundleSv.Resources))
+	assert.Equal(4, len(bundleEn.Resources()))
+	assert.Equal(4, len(bundleNn.Resources()))
+	assert.Equal(4, len(bundleSv.Resources()))
 
-	assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources.GetMatch("logo*").RelPermalink())
-	assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources.GetMatch("logo*").RelPermalink())
-	assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources.GetMatch("logo*").RelPermalink())
+	b.AssertFileContent("/my/project/public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png")
+	b.AssertFileContent("/my/project/public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png")
+	b.AssertFileContent("/my/project/public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png")
 
 	b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv")
 	b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn")
@@ -278,9 +296,9 @@ Content.
 	b.AssertFileContent("/my/project/public/sv/sect/mybundle/logo.png", "PNG Data")
 	b.AssertFileContent("/my/project/public/nn/sect/mybundle/logo.png", "PNG Data")
 
-	nnSect := nnSite.getPage(KindSection, "sect")
+	nnSect := nnSite.getPage(page.KindSection, "sect")
 	assert.NotNil(nnSect)
-	assert.Equal(12, len(nnSect.Pages))
+	assert.Equal(12, len(nnSect.Pages()))
 	nnHome, _ := nnSite.Info.Home()
 	assert.Equal("/nn/", nnHome.RelPermalink())
 
diff --git a/hugolib/media.go b/hugolib/media.go
deleted file mode 100644
index aae9a787030..00000000000
--- a/hugolib/media.go
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-// An Image contains metadata for images + image sitemaps
-// https://support.google.com/webmasters/answer/178636?hl=en
-type Image struct {
-
-	// The URL of the image. In some cases, the image URL may not be on the
-	// same domain as your main site. This is fine, as long as both domains
-	// are verified in Webmaster Tools. If, for example, you use a
-	// content delivery network (CDN) to host your images, make sure that the
-	// hosting site is verified in Webmaster Tools OR that you submit your
-	// sitemap using robots.txt. In addition, make sure that your robots.txt
-	// file doesn’t disallow the crawling of any content you want indexed.
-	URL     string
-	Title   string
-	Caption string
-	AltText string
-
-	// The geographic location of the image. For example,
-	// <image:geo_location>Limerick, Ireland</image:geo_location>.
-	GeoLocation string
-
-	// A URL to the license of the image.
-	License string
-}
-
-// A Video contains metadata for videos + video sitemaps
-// https://support.google.com/webmasters/answer/80471?hl=en
-type Video struct {
-	ThumbnailLoc         string
-	Title                string
-	Description          string
-	ContentLoc           string
-	PlayerLoc            string
-	Duration             string
-	ExpirationDate       string
-	Rating               string
-	ViewCount            string
-	PublicationDate      string
-	FamilyFriendly       string
-	Restriction          string
-	GalleryLoc           string
-	Price                string
-	RequiresSubscription string
-	Uploader             string
-	Live                 string
-}
diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go
index 6a8c89b95ea..253259af175 100644
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -83,9 +83,9 @@ Menu Main:  {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
 
 	s := h.Sites[0]
 
-	require.Len(t, s.Menus, 2)
+	require.Len(t, s.Menus(), 2)
 
-	p1 := s.RegularPages[0].Menus()
+	p1 := s.RegularPages()[0].Menus()
 
 	// There is only one menu in the page, but it is "member of" 2
 	require.Len(t, p1, 1)
diff --git a/hugolib/minify_publisher_test.go b/hugolib/minify_publisher_test.go
index ce183343b44..66e674adead 100644
--- a/hugolib/minify_publisher_test.go
+++ b/hugolib/minify_publisher_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,13 +17,10 @@ import (
 	"testing"
 
 	"github.com/spf13/viper"
-
-	"github.com/stretchr/testify/require"
 )
 
 func TestMinifyPublisher(t *testing.T) {
 	t.Parallel()
-	assert := require.New(t)
 
 	v := viper.New()
 	v.Set("minify", true)
@@ -43,29 +40,24 @@ func TestMinifyPublisher(t *testing.T) {
 
 <body id="home">
 
-	<h1>{{ .Page.Title }}</h1>
+	<h1>{{ .Title }}</h1>
+	<p>{{ .Permalink }}</p>
 
 </body>
 </html>
 `
 
 	b := newTestSitesBuilder(t)
-	b.WithViper(v).WithContent("page.md", pageWithAlias)
-	b.WithTemplates("_default/list.html", htmlTemplate, "_default/single.html", htmlTemplate, "alias.html", htmlTemplate)
+	b.WithViper(v).WithTemplatesAdded("layouts/index.html", htmlTemplate)
 	b.CreateSites().Build(BuildCfg{})
 
-	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 1)
-
 	// Check minification
 	// HTML
-	b.AssertFileContent("public/page/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate – all you really need…</title><link rel=stylesheet href=css/style.css></head><body id=home><h1>Has Alias</h1></body></html>")
-	// HTML alias. Note the custom template which does no redirect.
-	b.AssertFileContent("public/foo/bar/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate ")
+	b.AssertFileContent("public/index.html", "<!doctype html>")
 
 	// RSS
 	b.AssertFileContent("public/index.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\"><channel><title/><link>https://example.org/</link>")
 
 	// Sitemap
-	b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>https://example.org/</loc><priority>0</priority></url><url>")
+	b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>h")
 }
diff --git a/hugolib/multilingual.go b/hugolib/multilingual.go
index c09e3667e48..6f744f3a532 100644
--- a/hugolib/multilingual.go
+++ b/hugolib/multilingual.go
@@ -1,4 +1,4 @@
-// Copyright 2016-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -62,10 +62,10 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
 	languages := make(langs.Languages, len(sites))
 
 	for i, s := range sites {
-		if s.Language == nil {
-			return nil, errors.New("Missing language for site")
+		if s.language == nil {
+			return nil, errors.New("missing language for site")
 		}
-		languages[i] = s.Language
+		languages[i] = s.language
 	}
 
 	defaultLang := cfg.GetString("defaultContentLanguage")
@@ -78,19 +78,15 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
 
 }
 
-func newMultiLingualForLanguage(language *langs.Language) *Multilingual {
-	languages := langs.Languages{language}
-	return &Multilingual{Languages: languages, DefaultLang: language}
-}
 func (ml *Multilingual) enabled() bool {
 	return len(ml.Languages) > 1
 }
 
 func (s *Site) multilingualEnabled() bool {
-	if s.owner == nil {
+	if s.h == nil {
 		return false
 	}
-	return s.owner.multilingual != nil && s.owner.multilingual.enabled()
+	return s.h.multilingual != nil && s.h.multilingual.enabled()
 }
 
 func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (langs.Languages, error) {
diff --git a/hugolib/orderedMap.go b/hugolib/orderedMap.go
deleted file mode 100644
index 457cd3d6e4b..00000000000
--- a/hugolib/orderedMap.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"sync"
-)
-
-type orderedMap struct {
-	sync.RWMutex
-	keys []interface{}
-	m    map[interface{}]interface{}
-}
-
-func newOrderedMap() *orderedMap {
-	return &orderedMap{m: make(map[interface{}]interface{})}
-}
-
-func newOrderedMapFromStringMapString(m map[string]string) *orderedMap {
-	om := newOrderedMap()
-	for k, v := range m {
-		om.Add(k, v)
-	}
-	return om
-}
-
-func (m *orderedMap) Add(k, v interface{}) {
-	m.Lock()
-	defer m.Unlock()
-	_, found := m.m[k]
-	if found {
-		panic(fmt.Sprintf("%v already added", v))
-	}
-	m.m[k] = v
-	m.keys = append(m.keys, k)
-}
-
-func (m *orderedMap) Get(k interface{}) (interface{}, bool) {
-	m.RLock()
-	defer m.RUnlock()
-	v, found := m.m[k]
-	return v, found
-}
-
-func (m *orderedMap) Contains(k interface{}) bool {
-	m.RLock()
-	defer m.RUnlock()
-	_, found := m.m[k]
-	return found
-}
-
-func (m *orderedMap) Keys() []interface{} {
-	m.RLock()
-	defer m.RUnlock()
-	return m.keys
-}
-
-func (m *orderedMap) Len() int {
-	m.RLock()
-	defer m.RUnlock()
-	return len(m.keys)
-}
-
-// Some shortcuts for known types.
-func (m *orderedMap) getShortcode(k interface{}) *shortcode {
-	v, found := m.Get(k)
-	if !found {
-		return nil
-	}
-	return v.(*shortcode)
-}
-
-func (m *orderedMap) getShortcodeRenderer(k interface{}) func() (string, error) {
-	v, found := m.Get(k)
-	if !found {
-		return nil
-	}
-	return v.(func() (string, error))
-}
-
-func (m *orderedMap) getString(k interface{}) string {
-	v, found := m.Get(k)
-	if !found {
-		return ""
-	}
-	return v.(string)
-}
diff --git a/hugolib/orderedMap_test.go b/hugolib/orderedMap_test.go
deleted file mode 100644
index fc3d25080f8..00000000000
--- a/hugolib/orderedMap_test.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"sync"
-	"testing"
-
-	"github.com/stretchr/testify/require"
-)
-
-func TestOrderedMap(t *testing.T) {
-	t.Parallel()
-	assert := require.New(t)
-
-	m := newOrderedMap()
-	m.Add("b", "vb")
-	m.Add("c", "vc")
-	m.Add("a", "va")
-	b, f1 := m.Get("b")
-
-	assert.True(f1)
-	assert.Equal(b, "vb")
-	assert.True(m.Contains("b"))
-	assert.False(m.Contains("e"))
-
-	assert.Equal([]interface{}{"b", "c", "a"}, m.Keys())
-
-}
-
-func TestOrderedMapConcurrent(t *testing.T) {
-	t.Parallel()
-	assert := require.New(t)
-
-	var wg sync.WaitGroup
-
-	m := newOrderedMap()
-
-	for i := 1; i < 20; i++ {
-		wg.Add(1)
-		go func(id int) {
-			defer wg.Done()
-			key := fmt.Sprintf("key%d", id)
-			val := key + "val"
-			m.Add(key, val)
-			v, found := m.Get(key)
-			assert.True(found)
-			assert.Equal(v, val)
-			assert.True(m.Contains(key))
-			assert.True(m.Len() > 0)
-			assert.True(len(m.Keys()) > 0)
-		}(i)
-
-	}
-
-	wg.Wait()
-}
diff --git a/hugolib/page.go b/hugolib/page.go
index 71070d1e8cd..72932f17092 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,1002 +15,196 @@ package hugolib
 
 import (
 	"bytes"
-	"context"
-	"errors"
 	"fmt"
-	"math/rand"
-	"reflect"
-
-	"github.com/gohugoio/hugo/common/hugo"
-
-	"github.com/gohugoio/hugo/common/maps"
-	"github.com/gohugoio/hugo/common/urls"
-	"github.com/gohugoio/hugo/media"
-
-	"github.com/gohugoio/hugo/langs"
-
-	"github.com/gohugoio/hugo/related"
-
-	"github.com/bep/gitmap"
-
-	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/hugolib/pagemeta"
-	"github.com/gohugoio/hugo/resources/resource"
-
-	"github.com/gohugoio/hugo/output"
-	"github.com/mitchellh/mapstructure"
-
 	"html/template"
-	"io"
+	"os"
 	"path"
 	"path/filepath"
-	"regexp"
 	"runtime"
+	"sort"
 	"strings"
-	"sync"
-	"time"
-	"unicode/utf8"
 
-	"github.com/gohugoio/hugo/compare"
-	"github.com/gohugoio/hugo/source"
+	"github.com/bep/gitmap"
 	"github.com/spf13/cast"
-)
 
-var (
-	cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
+	"github.com/gohugoio/hugo/helpers"
 
-	// This is all the kinds we can expect to find in .Site.Pages.
-	allKindsInPages = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm}
+	"github.com/gohugoio/hugo/common/herrors"
+	"github.com/gohugoio/hugo/parser/metadecoders"
 
-	allKinds = append(allKindsInPages, []string{kindRSS, kindSitemap, kindRobotsTXT, kind404}...)
+	"github.com/gohugoio/hugo/parser/pageparser"
+	"github.com/pkg/errors"
 
-	// Assert that it implements the Eqer interface.
-	_ compare.Eqer = (*Page)(nil)
-	_ compare.Eqer = (*PageOutput)(nil)
+	"github.com/gohugoio/hugo/output"
 
-	// Assert that it implements the interface needed for related searches.
-	_ related.Document = (*Page)(nil)
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/source"
 
-	// Page supports ref and relref
-	_ urls.RefLinker = (*Page)(nil)
+	"github.com/gohugoio/hugo/common/collections"
+	"github.com/gohugoio/hugo/common/text"
+	"github.com/gohugoio/hugo/resources"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
 )
 
-// Wraps a Page.
-type pageContainer interface {
-	page() *Page
-}
-
-const (
-	KindPage = "page"
-
-	// The rest are node types; home page, sections etc.
-
-	KindHome         = "home"
-	KindSection      = "section"
-	KindTaxonomy     = "taxonomy"
-	KindTaxonomyTerm = "taxonomyTerm"
-
-	// Temporary state.
-	kindUnknown = "unknown"
-
-	// The following are (currently) temporary nodes,
-	// i.e. nodes we create just to render in isolation.
-	kindRSS       = "RSS"
-	kindSitemap   = "sitemap"
-	kindRobotsTXT = "robotsTXT"
-	kind404       = "404"
-
-	pageResourceType = "page"
+var (
+	_ page.Page           = (*pageState)(nil)
+	_ collections.Grouper = (*pageState)(nil)
+	_ collections.Slicer  = (*pageState)(nil)
 )
 
-type Page struct {
-	*pageInit
-	*pageContentInit
-
-	// Kind is the discriminator that identifies the different page types
-	// in the different page collections. This can, as an example, be used
-	// to to filter regular pages, find sections etc.
-	// Kind will, for the pages available to the templates, be one of:
-	// page, home, section, taxonomy and taxonomyTerm.
-	// It is of string type to make it easy to reason about in
-	// the templates.
-	Kind string
-
-	// Since Hugo 0.18 we got rid of the Node type. So now all pages are ...
-	// pages (regular pages, home page, sections etc.).
-	// Sections etc. will have child pages. These were earlier placed in .Data.Pages,
-	// but can now be more intuitively also be fetched directly from .Pages.
-	// This collection will be nil for regular pages.
-	Pages Pages
-
-	// Since Hugo 0.32, a Page can have resources such as images and CSS associated
-	// with itself. The resource will typically be placed relative to the Page,
-	// but templates should use the links (Permalink and RelPermalink)
-	// provided by the Resource object.
-	Resources resource.Resources
-
-	// This is the raw front matter metadata that is going to be assigned to
-	// the Resources above.
-	resourcesMetadata []map[string]interface{}
-
-	// translations will contain references to this page in other language
-	// if available.
-	translations Pages
-
-	// A key that maps to translation(s) of this page. This value is fetched
-	// from the page front matter.
-	translationKey string
-
-	// Params contains configuration defined in the params section of page frontmatter.
-	params map[string]interface{}
-
-	// Content sections
-	contentv        template.HTML
-	summary         template.HTML
-	TableOfContents template.HTML
-
-	// Passed to the shortcodes
-	pageWithoutContent *PageWithoutContent
-
-	Aliases []string
-
-	Images []Image
-	Videos []Video
-
-	truncated bool
-	Draft     bool
-	Status    string
-
-	// PageMeta contains page stats such as word count etc.
-	PageMeta
-
-	// Markup contains the markup type for the content.
-	Markup string
-
-	extension   string
-	contentType string
-
-	Layout string
-
-	// For npn-renderable pages (see IsRenderable), the content itself
-	// is used as template and the template name is stored here.
-	selfLayout string
-
-	linkTitle string
-
-	// Content items.
-	pageContent
-
-	// whether the content is in a CJK language.
-	isCJKLanguage bool
-
-	// the content stripped for HTML
-	plain      string // TODO should be []byte
-	plainWords []string
-
-	// rendering configuration
-	renderingConfig *helpers.BlackFriday
-
-	// menus
-	pageMenus PageMenus
-
-	source.File
-
-	Position `json:"-"`
-
-	GitInfo *gitmap.GitInfo
-
-	// This was added as part of getting the Nodes (taxonomies etc.) to work as
-	// Pages in Hugo 0.18.
-	// It is deliberately named similar to Section, but not exported (for now).
-	// We currently have only one level of section in Hugo, but the page can live
-	// any number of levels down the file path.
-	// To support taxonomies like /categories/hugo etc. we will need to keep track
-	// of that information in a general way.
-	// So, sections represents the path to the content, i.e. a content file or a
-	// virtual content file in the situations where a taxonomy or a section etc.
-	// isn't accomanied by one.
-	sections []string
-
-	// Will only be set for sections and regular pages.
-	parent *Page
-
-	// When we create paginator pages, we create a copy of the original,
-	// but keep track of it here.
-	origOnCopy *Page
-
-	// Will only be set for section pages and the home page.
-	subSections Pages
-
-	s *Site
-
-	// Pulled over from old Node. TODO(bep) reorg and group (embed)
-
-	Site *SiteInfo `json:"-"`
-
-	title       string
-	Description string
-	Keywords    []string
-	data        map[string]interface{}
-
-	pagemeta.PageDates
-
-	Sitemap Sitemap
-	pagemeta.URLPath
-	frontMatterURL string
-
-	permalink    string
-	relPermalink string
-
-	// relative target path without extension and any base path element
-	// from the baseURL or the language code.
-	// This is used to construct paths in the page resources.
-	relTargetPathBase string
-	// Is set to a forward slashed path if this is a Page resources living in a folder below its owner.
-	resourcePath string
-
-	// This is enabled if it is a leaf bundle (the "index.md" type) and it is marked as headless in front matter.
-	// Being headless means that
-	// 1. The page itself is not rendered to disk
-	// 2. It is not available in .Site.Pages etc.
-	// 3. But you can get it via .Site.GetPage
-	headless bool
-
-	layoutDescriptor output.LayoutDescriptor
-
-	scratch *maps.Scratch
-
-	// It would be tempting to use the language set on the Site, but in they way we do
-	// multi-site processing, these values may differ during the initial page processing.
-	language *langs.Language
-
-	lang string
-
-	// When in Fast Render Mode, we only render a sub set of the pages, i.e. the
-	// pages the user is working on. There are, however, situations where we need to
-	// signal other pages to be rendered.
-	forceRender bool
-
-	// The output formats this page will be rendered to.
-	outputFormats output.Formats
-
-	// This is the PageOutput that represents the first item in outputFormats.
-	// Use with care, as there are potential for inifinite loops.
-	mainPageOutput *PageOutput
-
-	targetPathDescriptorPrototype *targetPathDescriptor
-}
-
-func stackTrace(length int) string {
-	trace := make([]byte, length)
-	runtime.Stack(trace, true)
-	return string(trace)
-}
-
-func (p *Page) Data() interface{} {
-	return p.data
-}
-
-func (p *Page) initContent() {
-
-	p.contentInit.Do(func() {
-		// This careful dance is here to protect against circular loops in shortcode/content
-		// constructs.
-		// TODO(bep) context vs the remote shortcodes
-		ctx, cancel := context.WithTimeout(context.Background(), p.s.Timeout)
-		defer cancel()
-		c := make(chan error, 1)
-
-		p.contentInitMu.Lock()
-		defer p.contentInitMu.Unlock()
-
-		go func() {
-			var err error
-
-			err = p.prepareContent()
-			if err != nil {
-				c <- err
-				return
-			}
-
-			select {
-			case <-ctx.Done():
-				return
-			default:
-			}
-
-			if len(p.summary) == 0 {
-				if err = p.setAutoSummary(); err != nil {
-					err = p.errorf(err, "failed to set auto summary")
-				}
-			}
-			c <- err
-		}()
-
-		select {
-		case <-ctx.Done():
-			p.s.Log.WARN.Printf("Timed out creating content for page %q (.Content will be empty). This is most likely a circular shortcode content loop that should be fixed. If this is just a shortcode calling a slow remote service, try to set \"timeout=30000\" (or higher, value is in milliseconds) in config.toml.\n", p.pathOrTitle())
-		case err := <-c:
-			if err != nil {
-				p.s.SendError(err)
-			}
-		}
-	})
-
-}
-
-// This is sent to the shortcodes for this page. Not doing that will create an infinite regress. So,
-// shortcodes can access .Page.TableOfContents, but not .Page.Content etc.
-func (p *Page) withoutContent() *PageWithoutContent {
-	p.pageInit.withoutContentInit.Do(func() {
-		p.pageWithoutContent = &PageWithoutContent{Page: p}
-	})
-	return p.pageWithoutContent
-}
-
-func (p *Page) Content() (interface{}, error) {
-	return p.content(), nil
-}
-
-func (p *Page) Truncated() bool {
-	p.initContent()
-	return p.truncated
-}
-
-func (p *Page) content() template.HTML {
-	p.initContent()
-	return p.contentv
-}
-
-func (p *Page) Summary() template.HTML {
-	p.initContent()
-	return p.summary
-}
-
-// Sites is a convenience method to get all the Hugo sites/languages configured.
-func (p *Page) Sites() SiteInfos {
-	return p.s.owner.siteInfos()
-}
-
-// SearchKeywords implements the related.Document interface needed for fast page searches.
-func (p *Page) SearchKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
-
-	v, err := p.Param(cfg.Name)
-	if err != nil {
-		return nil, err
-	}
+var (
+	pageTypesProvider = resource.NewResourceTypesProvider(media.OctetType, pageResourceType)
+	nopPageOutput     = &pageOutput{pagePerOutputProviders: nopPagePerOutput}
+)
 
-	return cfg.ToKeywords(v)
+// pageContext provides contextual information about this page, for error
+// logging and similar.
+type pageContext interface {
+	posOffset(offset int) text.Position
+	wrapError(err error) error
+	getRenderingConfig() *helpers.BlackFriday
 }
 
-// PubDate is when this page was or will be published.
-// NOTE: This is currently used for search only and is not meant to be used
-// directly in templates. We need to consolidate the dates in this struct.
-// TODO(bep) see https://github.com/gohugoio/hugo/issues/3854
-func (p *Page) PubDate() time.Time {
-	if !p.PublishDate.IsZero() {
-		return p.PublishDate
+// wrapErr adds some context to the given error if possible.
+func wrapErr(err error, ctx interface{}) error {
+	if pc, ok := ctx.(pageContext); ok {
+		return pc.wrapError(err)
 	}
-	return p.Date
-}
-
-func (*Page) ResourceType() string {
-	return pageResourceType
+	return err
 }
 
-func (p *Page) RSSLink() template.URL {
-	f, found := p.outputFormats.GetByName(output.RSSFormat.Name)
-	if !found {
-		return ""
-	}
-	return template.URL(newOutputFormat(p, f).Permalink())
+type pageSiteAdapter struct {
+	p page.Page
+	s *Site
 }
 
-func (p *Page) createLayoutDescriptor() output.LayoutDescriptor {
-	var section string
-
-	switch p.Kind {
-	case KindSection:
-		// In Hugo 0.22 we introduce nested sections, but we still only
-		// use the first level to pick the correct template. This may change in
-		// the future.
-		section = p.sections[0]
-	case KindTaxonomy, KindTaxonomyTerm:
-		section = p.s.taxonomiesPluralSingular[p.sections[0]]
-	default:
+func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) {
+	p, err := pa.s.getPageNew(pa.p, ref)
+	if p == nil {
+		// The nil struct has meaning in some situations, mostly to avoid breaking
+		// existing sites doing $nilpage.IsDescendant($p), which will always return
+		// false.
+		p = page.NilPage
 	}
-
-	return output.LayoutDescriptor{
-		Kind:    p.Kind,
-		Type:    p.Type(),
-		Lang:    p.Lang(),
-		Layout:  p.Layout,
-		Section: section,
-	}
-}
-
-// pageInit lazy initializes different parts of the page. It is extracted
-// into its own type so we can easily create a copy of a given page.
-type pageInit struct {
-	languageInit        sync.Once
-	pageMenusInit       sync.Once
-	pageMetaInit        sync.Once
-	renderingConfigInit sync.Once
-	withoutContentInit  sync.Once
-}
-
-type pageContentInit struct {
-	contentInitMu  sync.Mutex
-	contentInit    sync.Once
-	plainInit      sync.Once
-	plainWordsInit sync.Once
-}
-
-func (p *Page) resetContent() {
-	p.pageContentInit = &pageContentInit{}
+	return p, err
 }
 
-// IsNode returns whether this is an item of one of the list types in Hugo,
-// i.e. not a regular content page.
-func (p *Page) IsNode() bool {
-	return p.Kind != KindPage
-}
+type pageState struct {
+	// This slice will be of same length as the number of global slice of output
+	// formats (for all sites).
+	pageOutputs []*pageOutput
 
-// IsHome returns whether this is the home page.
-func (p *Page) IsHome() bool {
-	return p.Kind == KindHome
-}
+	// This will be shifted out when we start to render a new output format.
+	*pageOutput
 
-// IsSection returns whether this is a section page.
-func (p *Page) IsSection() bool {
-	return p.Kind == KindSection
+	// Common for all output formats.
+	*pageCommon
 }
 
-// IsPage returns whether this is a regular content page.
-func (p *Page) IsPage() bool {
-	return p.Kind == KindPage
-}
-
-// BundleType returns the bundle type: "leaf", "branch" or an empty string if it is none.
-// See https://gohugo.io/content-management/page-bundles/
-func (p *Page) BundleType() string {
-	if p.IsNode() {
-		return "branch"
-	}
-
-	var source interface{} = p.File
-	if fi, ok := source.(*fileInfo); ok {
-		switch fi.bundleTp {
-		case bundleBranch:
-			return "branch"
-		case bundleLeaf:
-			return "leaf"
-		}
+// Eq returns whether the current page equals the given page.
+// This is what's invoked when doing `{{ if eq $page $otherPage }}`
+func (p *pageState) Eq(other interface{}) bool {
+	pp, err := unwrapPage(other)
+	if err != nil {
+		return false
 	}
 
-	return ""
+	return p == pp
 }
 
-func (p *Page) MediaType() media.Type {
-	return media.OctetType
+func (p *pageState) GitInfo() *gitmap.GitInfo {
+	return p.gitInfo
 }
 
-type PageMeta struct {
-	wordCount      int
-	fuzzyWordCount int
-	readingTime    int
-	Weight         int
+func (p *pageState) MarshalJSON() ([]byte, error) {
+	return page.MarshalPageToJSON(p)
 }
 
-type Position struct {
-	PrevPage      *Page
-	NextPage      *Page
-	PrevInSection *Page
-	NextInSection *Page
-}
-
-type Pages []*Page
-
-func (ps Pages) String() string {
-	return fmt.Sprintf("Pages(%d)", len(ps))
-}
-
-// Used in tests.
-func (ps Pages) shuffle() {
-	for i := range ps {
-		j := rand.Intn(i + 1)
-		ps[i], ps[j] = ps[j], ps[i]
-	}
-}
-
-func (ps Pages) findPagePosByFilename(filename string) int {
-	for i, x := range ps {
-		if x.Filename() == filename {
-			return i
-		}
-	}
-	return -1
-}
-
-func (ps Pages) removeFirstIfFound(p *Page) Pages {
-	ii := -1
-	for i, pp := range ps {
-		if pp == p {
-			ii = i
-			break
+func (p *pageState) Pages() page.Pages {
+	p.pagesInit.Do(func() {
+		if p.pages != nil {
+			return
 		}
-	}
-
-	if ii != -1 {
-		ps = append(ps[:ii], ps[ii+1:]...)
-	}
-	return ps
-}
-
-func (ps Pages) findPagePosByFilnamePrefix(prefix string) int {
-	if prefix == "" {
-		return -1
-	}
 
-	lenDiff := -1
-	currPos := -1
-	prefixLen := len(prefix)
+		var pages page.Pages
 
-	// Find the closest match
-	for i, x := range ps {
-		if strings.HasPrefix(x.Filename(), prefix) {
-			diff := len(x.Filename()) - prefixLen
-			if lenDiff == -1 || diff < lenDiff {
-				lenDiff = diff
-				currPos = i
+		switch p.Kind() {
+		case page.KindPage:
+		case page.KindHome:
+			pages = p.s.RegularPages()
+		case page.KindTaxonomy:
+			termInfo := p.getTaxonomyNodeInfo()
+			taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey)
+			pages = taxonomy.Pages()
+		case page.KindTaxonomyTerm:
+			plural := p.getTaxonomyNodeInfo().plural
+			// A list of all page.KindTaxonomy pages with matching plural
+			for _, p := range p.s.findPagesByKind(page.KindTaxonomy) {
+				if p.SectionsEntries()[0] == plural {
+					pages = append(pages, p)
+				}
 			}
+		case kind404, kindSitemap, kindRobotsTXT:
+			pages = p.s.Pages()
 		}
-	}
-	return currPos
-}
-
-// findPagePos Given a page, it will find the position in Pages
-// will return -1 if not found
-func (ps Pages) findPagePos(page *Page) int {
-	for i, x := range ps {
-		if x.Filename() == page.Filename() {
-			return i
-		}
-	}
-	return -1
-}
-
-func (p *Page) Plain() string {
-	p.initContent()
-	p.initPlain(true)
-	return p.plain
-}
 
-func (p *Page) initPlain(lock bool) {
-	p.plainInit.Do(func() {
-		if lock {
-			p.contentInitMu.Lock()
-			defer p.contentInitMu.Unlock()
-		}
-		p.plain = helpers.StripHTML(string(p.contentv))
+		p.pages = pages
 	})
-}
-
-func (p *Page) PlainWords() []string {
-	p.initContent()
-	p.initPlainWords(true)
-	return p.plainWords
-}
-
-func (p *Page) initPlainWords(lock bool) {
-	p.plainWordsInit.Do(func() {
-		if lock {
-			p.contentInitMu.Lock()
-			defer p.contentInitMu.Unlock()
-		}
-		p.plainWords = strings.Fields(p.plain)
-	})
-}
-
-// Param is a convenience method to do lookups in Page's and Site's Params map,
-// in that order.
-//
-// This method is also implemented on Node and SiteInfo.
-func (p *Page) Param(key interface{}) (interface{}, error) {
-	keyStr, err := cast.ToStringE(key)
-	if err != nil {
-		return nil, err
-	}
-
-	keyStr = strings.ToLower(keyStr)
-	result, _ := p.traverseDirect(keyStr)
-	if result != nil {
-		return result, nil
-	}
-
-	keySegments := strings.Split(keyStr, ".")
-	if len(keySegments) == 1 {
-		return nil, nil
-	}
-
-	return p.traverseNested(keySegments)
-}
-
-func (p *Page) traverseDirect(key string) (interface{}, error) {
-	keyStr := strings.ToLower(key)
-	if val, ok := p.params[keyStr]; ok {
-		return val, nil
-	}
-
-	return p.Site.Params[keyStr], nil
-}
-
-func (p *Page) traverseNested(keySegments []string) (interface{}, error) {
-	result := traverse(keySegments, p.params)
-	if result != nil {
-		return result, nil
-	}
-
-	result = traverse(keySegments, p.Site.Params)
-	if result != nil {
-		return result, nil
-	}
-
-	// Didn't find anything, but also no problems.
-	return nil, nil
-}
-
-func traverse(keys []string, m map[string]interface{}) interface{} {
-	// Shift first element off.
-	firstKey, rest := keys[0], keys[1:]
-	result := m[firstKey]
-
-	// No point in continuing here.
-	if result == nil {
-		return result
-	}
-
-	if len(rest) == 0 {
-		// That was the last key.
-		return result
-	}
-
-	// That was not the last key.
-	return traverse(rest, cast.ToStringMap(result))
-}
-
-func (p *Page) Author() Author {
-	authors := p.Authors()
-
-	for _, author := range authors {
-		return author
-	}
-	return Author{}
-}
 
-func (p *Page) Authors() AuthorList {
-	authorKeys, ok := p.params["authors"]
-	if !ok {
-		return AuthorList{}
-	}
-	authors := authorKeys.([]string)
-	if len(authors) < 1 || len(p.Site.Authors) < 1 {
-		return AuthorList{}
-	}
-
-	al := make(AuthorList)
-	for _, author := range authors {
-		a, ok := p.Site.Authors[author]
-		if ok {
-			al[author] = a
-		}
-	}
-	return al
-}
-
-func (p *Page) UniqueID() string {
-	return p.File.UniqueID()
-}
-
-// Returns the page as summary and main.
-func (p *Page) setUserDefinedSummary(rawContentCopy []byte) (*summaryContent, error) {
-
-	sc, err := splitUserDefinedSummaryAndContent(p.Markup, rawContentCopy)
-
-	if err != nil {
-		return nil, err
-	}
-
-	if sc == nil {
-		// No divider found
-		return nil, nil
-	}
-
-	p.summary = helpers.BytesToHTML(sc.summary)
-
-	return sc, nil
-}
-
-// Make this explicit so there is no doubt about what is what.
-type summaryContent struct {
-	summary []byte
-	content []byte
+	return p.pages
 }
 
-func splitUserDefinedSummaryAndContent(markup string, c []byte) (sc *summaryContent, err error) {
-	defer func() {
-		if r := recover(); r != nil {
-			err = fmt.Errorf("summary split failed: %s", r)
-		}
-	}()
-
-	startDivider := bytes.Index(c, internalSummaryDividerBaseBytes)
-
-	if startDivider == -1 {
-		return
-	}
-
-	startTag := "p"
-	switch markup {
-	case "asciidoc":
-		startTag = "div"
-
+// RawContent returns the un-rendered source content without
+// any leading front matter.
+func (p *pageState) RawContent() string {
+	if p.source.parsed == nil {
+		return ""
 	}
-
-	// Walk back and forward to the surrounding tags.
-	start := bytes.LastIndex(c[:startDivider], []byte("<"+startTag))
-	end := bytes.Index(c[startDivider:], []byte("</"+startTag))
-
+	start := p.source.posMainContent
 	if start == -1 {
-		start = startDivider
-	} else {
-		start = startDivider - (startDivider - start)
-	}
-
-	if end == -1 {
-		end = startDivider + len(internalSummaryDividerBase)
-	} else {
-		end = startDivider + end + len(startTag) + 3
-	}
-
-	var addDiv bool
-
-	switch markup {
-	case "rst":
-		addDiv = true
+		start = 0
 	}
-
-	withoutDivider := append(c[:start], bytes.Trim(c[end:], "\n")...)
-
-	var summary []byte
-
-	if len(withoutDivider) > 0 {
-		summary = bytes.TrimSpace(withoutDivider[:start])
-	}
-
-	if addDiv {
-		// For the rst
-		summary = append(append([]byte(nil), summary...), []byte("</div>")...)
-	}
-
-	if err != nil {
-		return
-	}
-
-	sc = &summaryContent{
-		summary: summary,
-		content: bytes.TrimSpace(withoutDivider),
-	}
-
-	return
+	return string(p.source.parsed.Input()[start:])
 }
 
-func (p *Page) setAutoSummary() error {
-	var summary string
-	var truncated bool
-	// This careful init dance could probably be refined, but it is purely for performance
-	// reasons. These "plain" methods are expensive if the plain content is never actually
-	// used.
-	p.initPlain(false)
-	if p.isCJKLanguage {
-		p.initPlainWords(false)
-		summary, truncated = p.s.ContentSpec.TruncateWordsByRune(p.plainWords)
-	} else {
-		summary, truncated = p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain)
-	}
-	p.summary = template.HTML(summary)
-	p.truncated = truncated
-
-	return nil
+func (p *pageState) Resources() resource.Resources {
+	p.resourcesInit.Do(func() {
 
-}
+		sort := func() {
+			sort.SliceStable(p.resources, func(i, j int) bool {
+				ri, rj := p.resources[i], p.resources[j]
+				if ri.ResourceType() < rj.ResourceType() {
+					return true
+				}
 
-func (p *Page) renderContent(content []byte) []byte {
-	return p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
-		Content: content, RenderTOC: true, PageFmt: p.Markup,
-		Cfg:        p.Language(),
-		DocumentID: p.UniqueID(), DocumentName: p.Path(),
-		Config: p.getRenderingConfig()})
-}
+				p1, ok1 := ri.(page.Page)
+				p2, ok2 := rj.(page.Page)
 
-func (p *Page) getRenderingConfig() *helpers.BlackFriday {
-	p.renderingConfigInit.Do(func() {
-		bfParam := p.getParamToLower("blackfriday")
-		if bfParam == nil {
-			p.renderingConfig = p.s.ContentSpec.BlackFriday
-			return
-		}
-		// Create a copy so we can modify it.
-		bf := *p.s.ContentSpec.BlackFriday
-		p.renderingConfig = &bf
+				if ok1 != ok2 {
+					return ok2
+				}
 
-		if p.Language() == nil {
-			panic(fmt.Sprintf("nil language for %s with source lang %s", p.BaseFileName(), p.lang))
-		}
+				if ok1 {
+					return page.DefaultPageSort(p1, p2)
+				}
 
-		pageParam := cast.ToStringMap(bfParam)
-		if err := mapstructure.Decode(pageParam, &p.renderingConfig); err != nil {
-			p.s.Log.FATAL.Printf("Failed to get rendering config for %s:\n%s", p.BaseFileName(), err.Error())
+				return ri.RelPermalink() < rj.RelPermalink()
+			})
 		}
 
-	})
-
-	return p.renderingConfig
-}
-
-func (s *Site) newPage(filename string) *Page {
-	fi := newFileInfo(
-		s.SourceSpec,
-		s.absContentDir(),
-		filename,
-		nil,
-		bundleNot,
-	)
-	return s.newPageFromFile(fi)
-}
-
-func (s *Site) newPageFromFile(fi *fileInfo) *Page {
-	return &Page{
-		pageInit:        &pageInit{},
-		pageContentInit: &pageContentInit{},
-		Kind:            kindFromFileInfo(fi),
-		contentType:     "",
-		File:            fi,
-		Keywords:        []string{}, Sitemap: Sitemap{Priority: -1},
-		params:       make(map[string]interface{}),
-		translations: make(Pages, 0),
-		sections:     sectionsFromFile(fi),
-		Site:         &s.Info,
-		s:            s,
-	}
-}
-
-func (p *Page) IsRenderable() bool {
-	return p.renderable
-}
-
-func (p *Page) Type() string {
-	if p.contentType != "" {
-		return p.contentType
-	}
-
-	if x := p.Section(); x != "" {
-		return x
-	}
-
-	return "page"
-}
-
-// Section returns the first path element below the content root. Note that
-// since Hugo 0.22 we support nested sections, but this will always be the first
-// element of any nested path.
-func (p *Page) Section() string {
-	if p.Kind == KindSection || p.Kind == KindTaxonomy || p.Kind == KindTaxonomyTerm {
-		return p.sections[0]
-	}
-	return p.File.Section()
-}
+		sort()
 
-func (s *Site) newPageFrom(buf io.Reader, name string) (*Page, error) {
-	p, err := s.NewPage(name)
-	if err != nil {
-		return p, err
-	}
-	_, err = p.ReadFrom(buf)
-	if err != nil {
-		return nil, err
-	}
-
-	return p, err
-}
-
-func (s *Site) NewPage(name string) (*Page, error) {
-	if len(name) == 0 {
-		return nil, errors.New("Zero length page name")
-	}
-
-	// Create new page
-	p := s.newPage(name)
-	p.s = s
-	p.Site = &s.Info
-
-	return p, nil
-}
-
-func (p *Page) ReadFrom(buf io.Reader) (int64, error) {
-	// Parse for metadata & body
-	if err := p.parse(buf); err != nil {
-		return 0, p.errWithFileContext(err)
-
-	}
-
-	if err := p.mapContent(); err != nil {
-		return 0, p.errWithFileContext(err)
-	}
-
-	return int64(len(p.source.parsed.Input())), nil
-}
-
-func (p *Page) WordCount() int {
-	p.initContentPlainAndMeta()
-	return p.wordCount
-}
-
-func (p *Page) ReadingTime() int {
-	p.initContentPlainAndMeta()
-	return p.readingTime
-}
-
-func (p *Page) FuzzyWordCount() int {
-	p.initContentPlainAndMeta()
-	return p.fuzzyWordCount
-}
-
-func (p *Page) initContentPlainAndMeta() {
-	p.initContent()
-	p.initPlain(true)
-	p.initPlainWords(true)
-	p.initMeta()
-}
-
-func (p *Page) initContentAndMeta() {
-	p.initContent()
-	p.initMeta()
-}
-
-func (p *Page) initMeta() {
-	p.pageMetaInit.Do(func() {
-		if p.isCJKLanguage {
-			p.wordCount = 0
-			for _, word := range p.plainWords {
-				runeCount := utf8.RuneCountInString(word)
-				if len(word) == runeCount {
-					p.wordCount++
-				} else {
-					p.wordCount += runeCount
-				}
-			}
-		} else {
-			p.wordCount = helpers.TotalWords(p.plain)
+		if len(p.m.resourcesMetadata) > 0 {
+			resources.AssignMetadata(p.m.resourcesMetadata, p.resources...)
+			sort()
 		}
 
-		// TODO(bep) is set in a test. Fix that.
-		if p.fuzzyWordCount == 0 {
-			p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
-		}
-
-		if p.isCJKLanguage {
-			p.readingTime = (p.wordCount + 500) / 501
-		} else {
-			p.readingTime = (p.wordCount + 212) / 213
-		}
 	})
+	return p.resources
 }
 
-// HasShortcode return whether the page has a shortcode with the given name.
-// This method is mainly motivated with the Hugo Docs site's need for a list
-// of pages with the `todo` shortcode in it.
-func (p *Page) HasShortcode(name string) bool {
+func (p *pageState) HasShortcode(name string) bool {
 	if p.shortcodeState == nil {
 		return false
 	}
@@ -1018,1106 +212,653 @@ func (p *Page) HasShortcode(name string) bool {
 	return p.shortcodeState.nameSet[name]
 }
 
-// AllTranslations returns all translations, including the current Page.
-func (p *Page) AllTranslations() Pages {
-	return p.translations
+func (p *pageState) Site() page.Site {
+	return &p.s.Info
 }
 
-// IsTranslated returns whether this content file is translated to
-// other language(s).
-func (p *Page) IsTranslated() bool {
-	return len(p.translations) > 1
+func (p *pageState) String() string {
+	if sourceRef := p.sourceRef(); sourceRef != "" {
+		return fmt.Sprintf("Page(%s)", sourceRef)
+	}
+	return fmt.Sprintf("Page(%q)", p.Title())
 }
 
-// Translations returns the translations excluding the current Page.
-func (p *Page) Translations() Pages {
-	translations := make(Pages, 0)
-	for _, t := range p.translations {
-		if t.Lang() != p.Lang() {
-			translations = append(translations, t)
-		}
-	}
-	return translations
+// IsTranslated returns whether this content file is translated to
+// other language(s).
+func (p *pageState) IsTranslated() bool {
+	p.s.h.init.translations.Do()
+	return len(p.translations) > 0
 }
 
 // TranslationKey returns the key used to map language translations of this page.
 // It will use the translationKey set in front matter if set, or the content path and
 // filename (excluding any language code and extension), e.g. "about/index".
 // The Page Kind is always prepended.
-func (p *Page) TranslationKey() string {
-	if p.translationKey != "" {
-		return p.Kind + "/" + p.translationKey
-	}
-
-	if p.IsNode() {
-		return path.Join(p.Kind, path.Join(p.sections...), p.TranslationBaseName())
-	}
-
-	return path.Join(p.Kind, filepath.ToSlash(p.Dir()), p.TranslationBaseName())
-}
-
-func (p *Page) LinkTitle() string {
-	if len(p.linkTitle) > 0 {
-		return p.linkTitle
-	}
-	return p.title
-}
-
-func (p *Page) shouldBuild() bool {
-	return shouldBuild(p.s.BuildFuture, p.s.BuildExpired,
-		p.s.BuildDrafts, p.Draft, p.PublishDate, p.ExpiryDate)
-}
-
-func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
-	publishDate time.Time, expiryDate time.Time) bool {
-	if !(buildDrafts || !Draft) {
-		return false
-	}
-	if !buildFuture && !publishDate.IsZero() && publishDate.After(time.Now()) {
-		return false
-	}
-	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(time.Now()) {
-		return false
-	}
-	return true
-}
-
-func (p *Page) IsDraft() bool {
-	return p.Draft
-}
-
-func (p *Page) IsFuture() bool {
-	if p.PublishDate.IsZero() {
-		return false
-	}
-	return p.PublishDate.After(time.Now())
-}
-
-func (p *Page) IsExpired() bool {
-	if p.ExpiryDate.IsZero() {
-		return false
-	}
-	return p.ExpiryDate.Before(time.Now())
-}
-
-func (p *Page) URL() string {
-
-	if p.IsPage() && p.URLPath.URL != "" {
-		// This is the url set in front matter
-		return p.URLPath.URL
-	}
-	// Fall back to the relative permalink.
-	u := p.RelPermalink()
-	return u
-}
-
-// Permalink returns the absolute URL to this Page.
-func (p *Page) Permalink() string {
-	if p.headless {
-		return ""
-	}
-	return p.permalink
-}
-
-// RelPermalink gets a URL to the resource relative to the host.
-func (p *Page) RelPermalink() string {
-	if p.headless {
-		return ""
-	}
-	return p.relPermalink
-}
-
-// See resource.Resource
-// This value is used, by default, in Resources.ByPrefix etc.
-func (p *Page) Name() string {
-	if p.resourcePath != "" {
-		return p.resourcePath
-	}
-	return p.title
-}
+func (p *pageState) TranslationKey() string {
+	p.translationKeyInit.Do(func() {
+		if p.m.translationKey != "" {
+			p.translationKey = p.Kind() + "/" + p.m.translationKey
+		} else if p.IsPage() && p.File() != nil {
+			p.translationKey = path.Join(p.Kind(), filepath.ToSlash(p.File().Dir()), p.File().TranslationBaseName())
+		} else if p.IsNode() {
+			p.translationKey = path.Join(p.Kind(), p.SectionsPath())
+		}
 
-func (p *Page) Title() string {
-	return p.title
-}
+	})
 
-func (p *Page) Params() map[string]interface{} {
-	return p.params
-}
+	return p.translationKey
 
-func (p *Page) subResourceTargetPathFactory(base string) string {
-	return path.Join(p.relTargetPathBase, base)
 }
 
-// Prepare this page for rendering for a new site. The flag start is set
-// for the first site and output format.
-func (p *Page) prepareForRender(start bool) error {
-	p.setContentInit(start)
-	if start {
-		return p.initMainOutputFormat()
-	}
-	return nil
+// AllTranslations returns all translations, including the current Page.
+func (p *pageState) AllTranslations() page.Pages {
+	p.s.h.init.translations.Do()
+	return p.allTranslations
 }
 
-func (p *Page) initMainOutputFormat() error {
-	outFormat := p.outputFormats[0]
-	pageOutput, err := newPageOutput(p, false, false, outFormat)
-
-	if err != nil {
-		return p.errorf(err, "failed to create output page for type %q", outFormat.Name)
-	}
-
-	p.mainPageOutput = pageOutput
-
-	return nil
-
+// Translations returns the translations excluding the current Page.
+func (p *pageState) Translations() page.Pages {
+	p.s.h.init.translations.Do()
+	return p.translations
 }
 
-func (p *Page) setContentInit(start bool) error {
-
-	if start {
-		// This is a new language.
-		p.shortcodeState.clearDelta()
-	}
-	updated := true
-	if p.shortcodeState != nil {
-		updated = p.shortcodeState.updateDelta()
-	}
-
-	if updated {
-		p.resetContent()
-	}
-
-	for _, r := range p.Resources.ByType(pageResourceType) {
-		p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
-		bp := r.(*Page)
-		if start {
-			bp.shortcodeState.clearDelta()
-		}
-		if bp.shortcodeState != nil {
-			updated = bp.shortcodeState.updateDelta()
-		}
-		if updated {
-			bp.resetContent()
-		}
+func (p *pageState) getRenderingConfig() *helpers.BlackFriday {
+	if p.m.renderingConfig == nil {
+		return p.s.ContentSpec.BlackFriday
 	}
-
-	return nil
-
+	return p.m.renderingConfig
 }
 
-func (p *Page) prepareContent() error {
-	s := p.s
-
-	// If we got this far it means that this is either a new Page pointer
-	// or a template or similar has changed so wee need to do a rerendering
-	// of the shortcodes etc.
-
-	// If in watch mode or if we have multiple sites or output formats,
-	// we need to keep the original so we can
-	// potentially repeat this process on rebuild.
-	needsACopy := s.running() || len(s.owner.Sites) > 1 || len(p.outputFormats) > 1
-	var workContentCopy []byte
-	if needsACopy {
-		workContentCopy = make([]byte, len(p.workContent))
-		copy(workContentCopy, p.workContent)
-	} else {
-		// Just reuse the same slice.
-		workContentCopy = p.workContent
-	}
-
-	var err error
-	// Note: The shortcodes in a page cannot access the page content it lives in,
-	// hence the withoutContent().
-	if workContentCopy, err = handleShortcodes(p.withoutContent(), workContentCopy); err != nil {
-		return err
+func (ps *pageState) initCommonProviders(pp pagePaths) error {
+	if ps.IsPage() {
+		ps.posNextPrev = &nextPrev{init: ps.s.init.prevNext}
+		ps.posNextPrevSection = &nextPrev{init: ps.s.init.prevNextInSection}
+		ps.InSectionPositioner = newPagePositionInSection(ps.posNextPrevSection)
+		ps.Positioner = newPagePosition(ps.posNextPrev)
 	}
 
-	if p.Markup != "html" && p.source.hasSummaryDivider {
-
-		// Now we know enough to create a summary of the page and count some words
-		summaryContent, err := p.setUserDefinedSummary(workContentCopy)
-
-		if err != nil {
-			s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
-		} else if summaryContent != nil {
-			workContentCopy = summaryContent.content
-		}
-
-		p.contentv = helpers.BytesToHTML(workContentCopy)
-
-	} else {
-		p.contentv = helpers.BytesToHTML(workContentCopy)
-	}
+	ps.OutputFormatsProvider = pp
+	ps.targetPathDescriptor = pp.targetPathDescriptor
+	ps.RefProvider = newPageRef(ps)
+	ps.SitesProvider = &ps.s.Info
 
 	return nil
 }
 
-func (p *Page) updateMetaData(frontmatter map[string]interface{}) error {
-	if frontmatter == nil {
-		return errors.New("missing frontmatter data")
-	}
-	// Needed for case insensitive fetching of params values
-	maps.ToLower(frontmatter)
-
-	var mtime time.Time
-	if p.FileInfo() != nil {
-		mtime = p.FileInfo().ModTime()
-	}
-
-	var gitAuthorDate time.Time
-	if p.GitInfo != nil {
-		gitAuthorDate = p.GitInfo.AuthorDate
-	}
+func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
+	p.layoutDescriptorInit.Do(func() {
+		var section string
+		sections := p.SectionsEntries()
 
-	descriptor := &pagemeta.FrontMatterDescriptor{
-		Frontmatter:   frontmatter,
-		Params:        p.params,
-		Dates:         &p.PageDates,
-		PageURLs:      &p.URLPath,
-		BaseFilename:  p.ContentBaseName(),
-		ModTime:       mtime,
-		GitAuthorDate: gitAuthorDate,
-	}
-
-	// Handle the date separately
-	// TODO(bep) we need to "do more" in this area so this can be split up and
-	// more easily tested without the Page, but the coupling is strong.
-	err := p.s.frontmatterHandler.HandleDates(descriptor)
-	if err != nil {
-		p.s.Log.ERROR.Printf("Failed to handle dates for page %q: %s", p.Path(), err)
-	}
-
-	var draft, published, isCJKLanguage *bool
-	for k, v := range frontmatter {
-		loki := strings.ToLower(k)
-
-		if loki == "published" { // Intentionally undocumented
-			vv, err := cast.ToBoolE(v)
-			if err == nil {
-				published = &vv
-			}
-			// published may also be a date
-			continue
+		switch p.Kind() {
+		case page.KindSection:
+			section = sections[0]
+		case page.KindTaxonomyTerm:
+			section = p.getTaxonomyNodeInfo().singular
+		case page.KindTaxonomy:
+			section = p.getTaxonomyNodeInfo().parent.singular
+		default:
 		}
 
-		if p.s.frontmatterHandler.IsDateKey(loki) {
-			continue
+		p.layoutDescriptor = output.LayoutDescriptor{
+			Kind:    p.Kind(),
+			Type:    p.Type(),
+			Lang:    p.Language().Lang,
+			Layout:  p.Layout(),
+			Section: section,
 		}
+	})
 
-		switch loki {
-		case "title":
-			p.title = cast.ToString(v)
-			p.params[loki] = p.title
-		case "linktitle":
-			p.linkTitle = cast.ToString(v)
-			p.params[loki] = p.linkTitle
-		case "description":
-			p.Description = cast.ToString(v)
-			p.params[loki] = p.Description
-		case "slug":
-			p.Slug = cast.ToString(v)
-			p.params[loki] = p.Slug
-		case "url":
-			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
-				return fmt.Errorf("Only relative URLs are supported, %v provided", url)
-			}
-			p.URLPath.URL = cast.ToString(v)
-			p.frontMatterURL = p.URLPath.URL
-			p.params[loki] = p.URLPath.URL
-		case "type":
-			p.contentType = cast.ToString(v)
-			p.params[loki] = p.contentType
-		case "extension", "ext":
-			p.extension = cast.ToString(v)
-			p.params[loki] = p.extension
-		case "keywords":
-			p.Keywords = cast.ToStringSlice(v)
-			p.params[loki] = p.Keywords
-		case "headless":
-			// For now, only the leaf bundles ("index.md") can be headless (i.e. produce no output).
-			// We may expand on this in the future, but that gets more complex pretty fast.
-			if p.TranslationBaseName() == "index" {
-				p.headless = cast.ToBool(v)
-			}
-			p.params[loki] = p.headless
-		case "outputs":
-			o := cast.ToStringSlice(v)
-			if len(o) > 0 {
-				// Output formats are exlicitly set in front matter, use those.
-				outFormats, err := p.s.outputFormatsConfig.GetByNames(o...)
-
-				if err != nil {
-					p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
-				} else {
-					p.outputFormats = outFormats
-					p.params[loki] = outFormats
-				}
+	return p.layoutDescriptor
 
-			}
-		case "draft":
-			draft = new(bool)
-			*draft = cast.ToBool(v)
-		case "layout":
-			p.Layout = cast.ToString(v)
-			p.params[loki] = p.Layout
-		case "markup":
-			p.Markup = cast.ToString(v)
-			p.params[loki] = p.Markup
-		case "weight":
-			p.Weight = cast.ToInt(v)
-			p.params[loki] = p.Weight
-		case "aliases":
-			p.Aliases = cast.ToStringSlice(v)
-			for _, alias := range p.Aliases {
-				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
-					return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
-				}
-			}
-			p.params[loki] = p.Aliases
-		case "status":
-			p.Status = cast.ToString(v)
-			p.params[loki] = p.Status
-		case "sitemap":
-			p.Sitemap = parseSitemap(cast.ToStringMap(v))
-			p.params[loki] = p.Sitemap
-		case "iscjklanguage":
-			isCJKLanguage = new(bool)
-			*isCJKLanguage = cast.ToBool(v)
-		case "translationkey":
-			p.translationKey = cast.ToString(v)
-			p.params[loki] = p.translationKey
-		case "resources":
-			var resources []map[string]interface{}
-			handled := true
-
-			switch vv := v.(type) {
-			case []map[interface{}]interface{}:
-				for _, vvv := range vv {
-					resources = append(resources, cast.ToStringMap(vvv))
-				}
-			case []map[string]interface{}:
-				resources = append(resources, vv...)
-			case []interface{}:
-				for _, vvv := range vv {
-					switch vvvv := vvv.(type) {
-					case map[interface{}]interface{}:
-						resources = append(resources, cast.ToStringMap(vvvv))
-					case map[string]interface{}:
-						resources = append(resources, vvvv)
-					}
-				}
-			default:
-				handled = false
-			}
+}
 
-			if handled {
-				p.params[loki] = resources
-				p.resourcesMetadata = resources
-				break
-			}
-			fallthrough
+func (p *pageState) getLayouts(layouts ...string) ([]string, error) {
+	f := p.outputFormat()
 
-		default:
-			// If not one of the explicit values, store in Params
-			switch vv := v.(type) {
-			case bool:
-				p.params[loki] = vv
-			case string:
-				p.params[loki] = vv
-			case int64, int32, int16, int8, int:
-				p.params[loki] = vv
-			case float64, float32:
-				p.params[loki] = vv
-			case time.Time:
-				p.params[loki] = vv
-			default: // handle array of strings as well
-				switch vvv := vv.(type) {
-				case []interface{}:
-					if len(vvv) > 0 {
-						switch vvv[0].(type) {
-						case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
-							p.params[loki] = vvv
-						case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
-							p.params[loki] = vvv
-						case []interface{}:
-							p.params[loki] = vvv
-						default:
-							a := make([]string, len(vvv))
-							for i, u := range vvv {
-								a[i] = cast.ToString(u)
-							}
-
-							p.params[loki] = a
-						}
-					} else {
-						p.params[loki] = []string{}
-					}
-				default:
-					p.params[loki] = vv
-				}
-			}
+	if len(layouts) == 0 {
+		selfLayout := p.selfLayoutForOutput(f)
+		if selfLayout != "" {
+			return []string{selfLayout}, nil
 		}
 	}
 
-	// Try markup explicitly set in the frontmatter
-	p.Markup = helpers.GuessType(p.Markup)
-	if p.Markup == "unknown" {
-		// Fall back to file extension (might also return "unknown")
-		p.Markup = helpers.GuessType(p.Ext())
+	layoutDescriptor := p.getLayoutDescriptor()
+
+	if len(layouts) > 0 {
+		layoutDescriptor.Layout = layouts[0]
+		layoutDescriptor.LayoutOverride = true
 	}
 
-	if draft != nil && published != nil {
-		p.Draft = *draft
-		p.s.Log.WARN.Printf("page %q has both draft and published settings in its frontmatter. Using draft.", p.Filename())
-	} else if draft != nil {
-		p.Draft = *draft
-	} else if published != nil {
-		p.Draft = !*published
+	return p.s.layoutHandler.For(layoutDescriptor, f)
+}
+
+// This is serialized
+func (p *pageState) initOutputFormat(idx int) error {
+	if err := p.shiftToOutputFormat(idx); err != nil {
+		return err
 	}
-	p.params["draft"] = p.Draft
 
-	if isCJKLanguage != nil {
-		p.isCJKLanguage = *isCJKLanguage
-	} else if p.s.Cfg.GetBool("hasCJKLanguage") {
-		if cjk.Match(p.source.parsed.Input()) {
-			p.isCJKLanguage = true
-		} else {
-			p.isCJKLanguage = false
+	if !p.renderable {
+		if _, err := p.Content(); err != nil {
+			return err
 		}
 	}
-	p.params["iscjklanguage"] = p.isCJKLanguage
 
 	return nil
-}
 
-func (p *Page) GetParam(key string) interface{} {
-	return p.getParam(key, false)
 }
 
-func (p *Page) getParamToLower(key string) interface{} {
-	return p.getParam(key, true)
+// Must be run after the site section tree etc. is built and ready.
+func (p *pageState) initPage() error {
+	if _, err := p.init.Do(); err != nil {
+		return err
+	}
+	return nil
 }
 
-func (p *Page) getParam(key string, stringToLower bool) interface{} {
-	v := p.params[strings.ToLower(key)]
+func (p *pageState) setPages(pages page.Pages) {
+	page.SortByDefault(pages)
+	p.pages = pages
+}
 
-	if v == nil {
-		return nil
-	}
+func (p *pageState) renderResources() error {
+	var toBeDeleted []int
 
-	switch val := v.(type) {
-	case bool:
-		return val
-	case string:
-		if stringToLower {
-			return strings.ToLower(val)
+	for i, r := range p.Resources() {
+		src, ok := r.(resource.Source)
+		if !ok {
+			// Pages gets rendered with the owning page.
+			continue
 		}
-		return val
-	case int64, int32, int16, int8, int:
-		return cast.ToInt(v)
-	case float64, float32:
-		return cast.ToFloat64(v)
-	case time.Time:
-		return val
-	case []string:
-		if stringToLower {
-			return helpers.SliceToLower(val)
+
+		if err := src.Publish(); err != nil {
+			if os.IsNotExist(err) {
+				// The resource has been deleted from the file system.
+				// This should be extremely rare, but can happen on live reload in server
+				// mode when the same resource is member of different page bundles.
+				toBeDeleted = append(toBeDeleted, i)
+			} else {
+				p.s.Log.ERROR.Printf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
+			}
+		} else {
+			p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
 		}
-		return v
-	case map[string]interface{}: // JSON and TOML
-		return v
-	case map[interface{}]interface{}: // YAML
-		return v
 	}
 
-	p.s.Log.ERROR.Printf("GetParam(\"%s\"): Unknown type %s\n", key, reflect.TypeOf(v))
+	for _, i := range toBeDeleted {
+		p.deleteResource(i)
+	}
+
 	return nil
 }
 
-func (p *Page) HasMenuCurrent(menuID string, me *MenuEntry) bool {
-
-	sectionPagesMenu := p.Site.sectionPagesMenu
+func (p *pageState) deleteResource(i int) {
+	p.resources = append(p.resources[:i], p.resources[i+1:]...)
+}
 
-	// page is labeled as "shadow-member" of the menu with the same identifier as the section
-	if sectionPagesMenu != "" {
-		section := p.Section()
+func (p *pageState) getTargetPaths() page.TargetPaths {
+	return p.targetPaths()
+}
 
-		if section != "" && sectionPagesMenu == menuID && section == me.Identifier {
-			return true
+func (p *pageState) setTranslations(pages page.Pages) {
+	p.allTranslations = pages
+	page.SortByLanguage(p.allTranslations)
+	translations := make(page.Pages, 0)
+	for _, t := range p.allTranslations {
+		if !t.Eq(p) {
+			translations = append(translations, t)
 		}
 	}
+	p.translations = translations
+}
 
-	if !me.HasChildren() {
-		return false
-	}
-
-	menus := p.Menus()
-
-	if m, ok := menus[menuID]; ok {
-
-		for _, child := range me.Children {
-			if child.IsEqual(m) {
-				return true
-			}
-			if p.HasMenuCurrent(menuID, child) {
-				return true
-			}
+func (p *pageState) AlternativeOutputFormats() page.OutputFormats {
+	f := p.outputFormat()
+	var o page.OutputFormats
+	for _, of := range p.OutputFormats() {
+		if of.Format.NotAlternative || of.Format.Name == f.Name {
+			continue
 		}
 
+		o = append(o, of)
 	}
+	return o
+}
 
-	if p.IsPage() {
-		return false
+func (p *pageState) Render(layout ...string) template.HTML {
+	l, err := p.getLayouts(layout...)
+	if err != nil {
+		p.s.SendError(p.wrapError(errors.Errorf(".Render: failed to resolve layout %v", layout)))
+		return ""
 	}
 
-	// The following logic is kept from back when Hugo had both Page and Node types.
-	// TODO(bep) consolidate / clean
-	nme := MenuEntry{Page: p, Name: p.title, URL: p.URL()}
-
-	for _, child := range me.Children {
-		if nme.IsSameResource(child) {
-			return true
+	for _, layout := range l {
+		templ, found := p.s.Tmpl.Lookup(layout)
+		if !found {
+			// This is legacy from when we had only one output format and
+			// HTML templates only. Some have references to layouts without suffix.
+			// We default to good old HTML.
+			templ, _ = p.s.Tmpl.Lookup(layout + ".html")
 		}
-		if p.HasMenuCurrent(menuID, child) {
-			return true
+		if templ != nil {
+			res, err := executeToString(templ, p)
+			if err != nil {
+				p.s.SendError(p.wrapError(errors.Wrapf(err, ".Render: failed to execute template %q v", layout)))
+				return ""
+			}
+			return template.HTML(res)
 		}
 	}
 
-	return false
+	return ""
 
 }
 
-func (p *Page) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+// wrapError adds some more context to the given error if possible
+func (p *pageState) wrapError(err error) error {
 
-	menus := p.Menus()
-
-	if me, ok := menus[menuID]; ok {
-		if me.IsEqual(inme) {
-			return true
-		}
-	}
-
-	if p.IsPage() {
-		return false
+	var filename string
+	if p.File() != nil {
+		filename = p.File().Filename()
 	}
 
-	// The following logic is kept from back when Hugo had both Page and Node types.
-	// TODO(bep) consolidate / clean
-	me := MenuEntry{Page: p, Name: p.title, URL: p.URL()}
-
-	if !me.IsSameResource(inme) {
-		return false
-	}
+	err, _ = herrors.WithFileContextForFile(
+		err,
+		filename,
+		filename,
+		p.s.SourceSpec.Fs.Source,
+		herrors.SimpleLineMatcher)
 
-	// this resource may be included in several menus
-	// search for it to make sure that it is in the menu with the given menuId
-	if menu, ok := (*p.Site.Menus)[menuID]; ok {
-		for _, menuEntry := range *menu {
-			if menuEntry.IsSameResource(inme) {
-				return true
-			}
+	return err
+}
 
-			descendantFound := p.isSameAsDescendantMenu(inme, menuEntry)
-			if descendantFound {
-				return descendantFound
-			}
+func (p *pageState) addResources(r ...resource.Resource) {
+	p.resources = append(p.resources, r...)
+}
 
-		}
+func (p *pageState) addSectionToParent() {
+	if p.parent == nil {
+		return
 	}
-
-	return false
+	p.parent.subSections = append(p.parent.subSections, p)
 }
 
-func (p *Page) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
-	if parent.HasChildren() {
-		for _, child := range parent.Children {
-			if child.IsSameResource(inme) {
-				return true
-			}
-			descendantFound := p.isSameAsDescendantMenu(inme, child)
-			if descendantFound {
-				return descendantFound
-			}
-		}
+func (p *pageState) contentMarkupType() string {
+	if p.m.markup != "" {
+		return p.m.markup
+
 	}
-	return false
+	return p.File().Ext()
 }
 
-func (p *Page) Menus() PageMenus {
-	p.pageMenusInit.Do(func() {
-		p.pageMenus = PageMenus{}
-
-		ms, ok := p.params["menus"]
-		if !ok {
-			ms, ok = p.params["menu"]
-		}
+func (p *pageState) mapContent(meta *pageMeta) error {
 
-		if ok {
-			link := p.RelPermalink()
+	s := p.shortcodeState
 
-			me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight, URL: link}
+	p.renderable = true
 
-			// Could be the name of the menu to attach it to
-			mname, err := cast.ToStringE(ms)
+	rn := &pageContentMap{
+		items: make([]interface{}, 0, 20),
+	}
 
-			if err == nil {
-				me.Menu = mname
-				p.pageMenus[mname] = &me
-				return
-			}
+	iter := p.source.parsed.Iterator()
 
-			// Could be a slice of strings
-			mnames, err := cast.ToStringSliceE(ms)
+	fail := func(err error, i pageparser.Item) error {
+		return p.parseError(err, iter.Input(), i.Pos)
+	}
 
-			if err == nil {
-				for _, mname := range mnames {
-					me.Menu = mname
-					p.pageMenus[mname] = &me
-				}
-				return
-			}
+	// the parser is guaranteed to return items in proper order or fail, so …
+	// … it's safe to keep some "global" state
+	var currShortcode shortcode
+	var ordinal int
 
-			// Could be a structured menu entry
-			menus, err := cast.ToStringMapE(ms)
+Loop:
+	for {
+		it := iter.Next()
 
+		switch {
+		case it.Type == pageparser.TypeIgnore:
+		case it.Type == pageparser.TypeHTMLStart:
+			// This is HTML without front matter. It can still have shortcodes.
+			p.selfLayout = "__" + p.File().Filename()
+			p.renderable = false
+			rn.AddBytes(it)
+		case it.IsFrontMatter():
+			f := metadecoders.FormatFromFrontMatterType(it.Type)
+			m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
 			if err != nil {
-				p.s.Log.ERROR.Printf("unable to process menus for %q\n", p.title)
-			}
-
-			for name, menu := range menus {
-				menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
-				if menu != nil {
-					p.s.Log.DEBUG.Printf("found menu: %q, in %q\n", name, p.title)
-					ime, err := cast.ToStringMapE(menu)
-					if err != nil {
-						p.s.Log.ERROR.Printf("unable to process menus for %q: %s", p.title, err)
-					}
-
-					menuEntry.marshallMap(ime)
+				if fe, ok := err.(herrors.FileError); ok {
+					return herrors.ToFileErrorWithOffset(fe, iter.LineNumber()-1)
+				} else {
+					return err
 				}
-				p.pageMenus[name] = &menuEntry
+			}
 
+			if err := meta.setMetadata(p, m); err != nil {
+				return err
 			}
-		}
-	})
 
-	return p.pageMenus
-}
+			next := iter.Peek()
+			if !next.IsDone() {
+				p.source.posMainContent = next.Pos
+			}
 
-func (p *Page) shouldRenderTo(f output.Format) bool {
-	_, found := p.outputFormats.GetByName(f.Name)
-	return found
-}
+			if !p.s.shouldBuild(p) {
+				// Nothing more to do.
+				return nil
+			}
 
-// RawContent returns the un-rendered source content without
-// any leading front matter.
-func (p *Page) RawContent() string {
-	if p.source.posMainContent == -1 {
-		return ""
-	}
-	return string(p.source.parsed.Input()[p.source.posMainContent:])
-}
+		case it.Type == pageparser.TypeLeadSummaryDivider:
+			posBody := -1
+			f := func(item pageparser.Item) bool {
+				if posBody == -1 && !item.IsDone() {
+					posBody = item.Pos
+				}
 
-func (p *Page) FullFilePath() string {
-	return filepath.Join(p.Dir(), p.LogicalName())
-}
+				if item.IsNonWhitespace() {
+					p.truncated = true
 
-// Returns the canonical, absolute fully-qualifed logical reference used by
-// methods such as GetPage and ref/relref shortcodes to refer to
-// this page. It is prefixed with a "/".
-//
-// For pages that have a source file, it is returns the path to this file as an
-// absolute path rooted in this site's content dir.
-// For pages that do not (sections witout content page etc.), it returns the
-// virtual path, consistent with where you would add a source file.
-func (p *Page) absoluteSourceRef() string {
-	if p.File != nil {
-		sourcePath := p.Path()
-		if sourcePath != "" {
-			return "/" + filepath.ToSlash(sourcePath)
-		}
-	}
+					// Done
+					return false
+				}
+				return true
+			}
+			iter.PeekWalk(f)
 
-	if len(p.sections) > 0 {
-		// no backing file, return the virtual source path
-		return "/" + path.Join(p.sections...)
-	}
+			p.source.posSummaryEnd = it.Pos
+			p.source.posBodyStart = posBody
+			p.source.hasSummaryDivider = true
 
-	return ""
-}
+			if meta.markup != "html" {
+				// The content will be rendered by Blackfriday or similar,
+				// and we need to track the summary.
+				rn.AddReplacement(internalSummaryDividerPre, it)
+			}
 
-// Pre render prepare steps
+		// Handle shortcode
+		case it.IsLeftShortcodeDelim():
+			// let extractShortcode handle left delim (will do so recursively)
+			iter.Backup()
 
-func (p *Page) prepareLayouts() error {
-	// TODO(bep): Check the IsRenderable logic.
-	if p.Kind == KindPage {
-		if !p.IsRenderable() {
-			self := "__" + p.UniqueID()
-			err := p.s.TemplateHandler().AddLateTemplate(self, string(p.content()))
+			currShortcode, err := s.extractShortcode(ordinal, 0, iter)
 			if err != nil {
-				return err
+				return fail(errors.Wrap(err, "failed to extract shortcode"), it)
 			}
-			p.selfLayout = self
-		}
-	}
-
-	return nil
-}
 
-func (p *Page) prepareData(s *Site) error {
-	if p.Kind != KindSection {
-		var pages Pages
-		p.data = make(map[string]interface{})
-
-		switch p.Kind {
-		case KindPage:
-		case KindHome:
-			pages = s.RegularPages
-		case KindTaxonomy:
-			plural := p.sections[0]
-			term := p.sections[1]
-
-			if s.Info.preserveTaxonomyNames {
-				if v, ok := s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, term)]; ok {
-					term = v
-				}
+			currShortcode.pos = it.Pos
+			currShortcode.length = iter.Current().Pos - it.Pos
+			if currShortcode.placeholder == "" {
+				currShortcode.placeholder = createShortcodePlaceholder("s", currShortcode.ordinal)
 			}
 
-			singular := s.taxonomiesPluralSingular[plural]
-			taxonomy := s.Taxonomies[plural].Get(term)
-
-			p.data[singular] = taxonomy
-			p.data["Singular"] = singular
-			p.data["Plural"] = plural
-			p.data["Term"] = term
-			pages = taxonomy.Pages()
-		case KindTaxonomyTerm:
-			plural := p.sections[0]
-			singular := s.taxonomiesPluralSingular[plural]
-
-			p.data["Singular"] = singular
-			p.data["Plural"] = plural
-			p.data["Terms"] = s.Taxonomies[plural]
-			// keep the following just for legacy reasons
-			p.data["OrderedIndex"] = p.data["Terms"]
-			p.data["Index"] = p.data["Terms"]
-
-			// A list of all KindTaxonomy pages with matching plural
-			for _, p := range s.findPagesByKind(KindTaxonomy) {
-				if p.sections[0] == plural {
-					pages = append(pages, p)
-				}
+			if currShortcode.name != "" {
+				s.nameSet[currShortcode.name] = true
 			}
-		}
 
-		p.data["Pages"] = pages
-		p.Pages = pages
-	}
+			if currShortcode.params == nil {
+				var s []string
+				currShortcode.params = s
+			}
 
-	// Now we know enough to set missing dates on home page etc.
-	p.updatePageDates()
+			currShortcode.placeholder = createShortcodePlaceholder("s", ordinal)
+			ordinal++
+			s.shortcodes = append(s.shortcodes, currShortcode)
 
-	return nil
-}
+			rn.AddShortcode(currShortcode)
 
-func (p *Page) updatePageDates() {
-	// TODO(bep) there is a potential issue with page sorting for home pages
-	// etc. without front matter dates set, but let us wrap the head around
-	// that in another time.
-	if !p.IsNode() {
-		return
-	}
+		case it.Type == pageparser.TypeEmoji:
+			if emoji := helpers.Emoji(it.ValStr()); emoji != nil {
+				rn.AddReplacement(emoji, it)
+			} else {
+				rn.AddBytes(it)
+			}
+		case it.IsEOF():
+			break Loop
+		case it.IsError():
+			err := fail(errors.WithStack(errors.New(it.ValStr())), it)
+			currShortcode.err = err
+			return err
 
-	if !p.Date.IsZero() {
-		if p.Lastmod.IsZero() {
-			p.Lastmod = p.Date
-		}
-		return
-	} else if !p.Lastmod.IsZero() {
-		if p.Date.IsZero() {
-			p.Date = p.Lastmod
+		default:
+			rn.AddBytes(it)
 		}
-		return
 	}
 
-	// Set it to the first non Zero date in children
-	var foundDate, foundLastMod bool
+	p.cmap = rn
 
-	for _, child := range p.Pages {
-		if !child.Date.IsZero() {
-			p.Date = child.Date
-			foundDate = true
-		}
-		if !child.Lastmod.IsZero() {
-			p.Lastmod = child.Lastmod
-			foundLastMod = true
-		}
+	return nil
+}
 
-		if foundDate && foundLastMod {
-			break
-		}
+func (p *pageState) errorf(err error, format string, a ...interface{}) error {
+	if herrors.UnwrapErrorWithFileContext(err) != nil {
+		// More isn't always better.
+		return err
+	}
+	args := append([]interface{}{p.Language().Lang, p.pathOrTitle()}, a...)
+	format = "[%s] page %q: " + format
+	if err == nil {
+		errors.Errorf(format, args...)
+		return fmt.Errorf(format, args...)
 	}
+	return errors.Wrapf(err, format, args...)
 }
 
-// copy creates a copy of this page with the lazy sync.Once vars reset
-// so they will be evaluated again, for word count calculations etc.
-func (p *Page) copy(initContent bool) *Page {
-	p.contentInitMu.Lock()
-	c := *p
-	p.contentInitMu.Unlock()
-	c.pageInit = &pageInit{}
-	if initContent {
-		if len(p.outputFormats) < 2 {
-			panic(fmt.Sprintf("programming error: page %q should not need to rebuild content as it has only %d outputs", p.Path(), len(p.outputFormats)))
-		}
-		c.pageContentInit = &pageContentInit{}
+// outputFormat returns the currently active output format for this page.
+// It panics if no pageOutput has been set (a programming error).
+func (p *pageState) outputFormat() (f output.Format) {
+	if p.pageOutput == nil {
+		panic("no pageOutput")
 	}
-	return &c
+	return p.pageOutput.f
 }
 
-func (p *Page) Hugo() hugo.Info {
-	return p.s.Info.hugoInfo
-}
+// parseError wraps err as a file error positioned at the given byte offset
+// into input. If err already carries a file location, the more specific
+// location wins and err is returned unchanged.
+func (p *pageState) parseError(err error, input []byte, offset int) error {
+	if herrors.UnwrapFileError(err) != nil {
+		// Use the most specific location.
+		return err
+	}
+	pos := p.posFromInput(input, offset)
+	return herrors.NewFileError("md", -1, pos.LineNumber, pos.ColumnNumber, err)
 
-// GetPage looks up a page for the given ref.
-//    {{ with .GetPage "blog" }}{{ .Title }}{{ end }}
-//
-// This will return nil when no page could be found, and will return
-// an error if the ref is ambiguous.
-func (p *Page) GetPage(ref string) (*Page, error) {
-	return p.s.getPageNew(p, ref)
 }
 
-func (p *Page) String() string {
-	if sourceRef := p.absoluteSourceRef(); sourceRef != "" {
-		return fmt.Sprintf("Page(%s)", sourceRef)
+// pathOrTitle returns the most specific identifier available for this page,
+// for use in logs and error messages: source filename, then path, then title.
+func (p *pageState) pathOrTitle() string {
+	if p.File() != nil {
+		return p.File().Filename()
 	}
-	return fmt.Sprintf("Page(%q)", p.title)
-}
 
-// Scratch returns the writable context associated with this Page.
-func (p *Page) Scratch() *maps.Scratch {
-	if p.scratch == nil {
-		p.scratch = maps.NewScratch()
+	if p.Path() != "" {
+		return p.Path()
 	}
-	return p.scratch
+
+	return p.Title()
 }
 
-func (p *Page) Language() *langs.Language {
-	p.initLanguage()
-	return p.language
+// posFromPage resolves a byte offset into the page's own parsed source to a
+// text.Position.
+// NOTE(review): identical to posOffset below — consider consolidating.
+func (p *pageState) posFromPage(offset int) text.Position {
+	return p.posFromInput(p.source.parsed.Input(), offset)
 }
 
-func (p *Page) Lang() string {
-	// When set, Language can be different from lang in the case where there is a
-	// content file (doc.sv.md) with language indicator, but there is no language
-	// config for that language. Then the language will fall back on the site default.
-	if p.Language() != nil {
-		return p.Language().Lang
+// posFromInput converts a byte offset into input to a 1-based line/column
+// text.Position, with the filename taken from pathOrTitle.
+func (p *pageState) posFromInput(input []byte, offset int) text.Position {
+	lf := []byte("\n")
+	input = input[:offset]
+	lineNumber := bytes.Count(input, lf) + 1
+	endOfLastLine := bytes.LastIndex(input, lf)
+
+	return text.Position{
+		Filename:     p.pathOrTitle(),
+		LineNumber:   lineNumber,
+		ColumnNumber: offset - endOfLastLine,
+		Offset:       offset,
 	}
-	return p.lang
 }
 
-func (p *Page) isNewTranslation(candidate *Page) bool {
+// posOffset resolves a byte offset into the page's parsed source to a
+// text.Position.
+// NOTE(review): duplicate of posFromPage above — consider consolidating.
+func (p *pageState) posOffset(offset int) text.Position {
+	return p.posFromInput(p.source.parsed.Input(), offset)
+}
 
-	if p.Kind != candidate.Kind {
-		return false
+// shiftToOutputFormat is serialized. The output format idx refers to the
+// full set of output formats for all sites. It activates pageOutputs[idx]
+// on this page and, recursively, on any bundled page resources.
+func (p *pageState) shiftToOutputFormat(idx int) error {
+	if err := p.initPage(); err != nil {
+		return err
 	}
 
-	if p.Kind == KindPage || p.Kind == kindUnknown {
-		panic("Node type not currently supported for this op")
+	if idx >= len(p.pageOutputs) {
+		panic(fmt.Sprintf("invalid page state for %q: got output format index %d, have %d", p.pathOrTitle(), idx, len(p.pageOutputs)))
 	}
 
-	// At this point, we know that this is a traditional Node (home page, section, taxonomy)
-	// It represents the same node, but different language, if the sections is the same.
-	if len(p.sections) != len(candidate.sections) {
-		return false
+	p.pageOutput = p.pageOutputs[idx]
+
+	if p.pageOutput == nil {
+		panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx))
 	}
 
-	for i := 0; i < len(p.sections); i++ {
-		if p.sections[i] != candidate.sections[i] {
-			return false
+	if idx > 0 {
+		// Check if we can reuse content from one of the previous formats.
+		for i := idx - 1; i >= 0; i-- {
+			po := p.pageOutputs[i]
+			if po.cp != nil && po.cp.reuse {
+				p.pageOutput.cp = po.cp
+				break
+			}
 		}
 	}
 
-	// Finally check that it is not already added.
-	for _, translation := range p.translations {
-		if candidate == translation {
-			return false
+	for _, r := range p.Resources().ByType(pageResourceType) {
+		rp := r.(*pageState)
+		if err := rp.shiftToOutputFormat(idx); err != nil {
+			return errors.Wrap(err, "failed to shift outputformat in Page resource")
 		}
 	}
 
-	return true
-
+	return nil
 }
 
-func (p *Page) shouldAddLanguagePrefix() bool {
-	if !p.Site.IsMultiLingual() {
-		return false
-	}
-
-	if p.s.owner.IsMultihost() {
-		return true
-	}
-
-	if p.Lang() == "" {
-		return false
-	}
+// getTaxonomyNodeInfo looks up the taxonomy node info for this page's
+// sections. It panics if no info is registered (invalid internal state).
+func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo {
+	info := p.s.taxonomyNodes.Get(p.SectionsEntries()...)
 
-	if !p.Site.defaultContentLanguageInSubdir && p.Lang() == p.s.multilingual().DefaultLang.Lang {
-		return false
+	if info == nil {
+		// This should never happen
+		panic(fmt.Sprintf("invalid taxonomy state for %q with sections %v", p.pathOrTitle(), p.SectionsEntries()))
 	}
 
-	return true
-}
-
-func (p *Page) initLanguage() {
-	p.languageInit.Do(func() {
-		if p.language != nil {
-			return
-		}
-
-		ml := p.s.multilingual()
-		if ml == nil {
-			panic("Multilanguage not set")
-		}
-		if p.lang == "" {
-			p.lang = ml.DefaultLang.Lang
-			p.language = ml.DefaultLang
-			return
-		}
-
-		language := ml.Language(p.lang)
-
-		if language == nil {
-			language = ml.DefaultLang
-		}
-
-		p.language = language
+	return info
+
-	})
 }
 
-func (p *Page) LanguagePrefix() string {
-	return p.Site.LanguagePrefix
+// sortParentSections re-sorts the parent's subsections using Hugo's default
+// page sort. No-op for pages without a parent (e.g. the home page).
+func (p *pageState) sortParentSections() {
+	if p.parent == nil {
+		return
+	}
+	page.SortByDefault(p.parent.subSections)
 }
 
-func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
-	if helpers.IsAbsURL(outfile) {
-		return outfile
+// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
+// this page. It is prefixed with a "/".
+//
+// For pages that have a source file, it returns the path to this file as an
+// absolute path rooted in this site's content dir.
+// For pages that do not (sections without content page etc.), it returns the
+// virtual path, consistent with where you would add a source file.
+func (p *pageState) sourceRef() string {
+	if p.File() != nil {
+		sourcePath := p.File().Path()
+		if sourcePath != "" {
+			return "/" + filepath.ToSlash(sourcePath)
+		}
 	}
 
-	if !should {
-		return outfile
+	if len(p.SectionsEntries()) > 0 {
+		// no backing file, return the virtual source path
+		return "/" + p.SectionsPath()
 	}
 
-	hadSlashSuffix := strings.HasSuffix(outfile, "/")
-
-	outfile = "/" + path.Join(p.Lang(), outfile)
-	if hadSlashSuffix {
-		outfile += "/"
-	}
-	return outfile
+	return ""
 }
 
-func sectionsFromFile(fi *fileInfo) []string {
-	dirname := fi.Dir()
-	dirname = strings.Trim(dirname, helpers.FilePathSeparator)
-	if dirname == "" {
-		return nil
-	}
-	parts := strings.Split(dirname, helpers.FilePathSeparator)
+// pageStatePages is a sortable, searchable collection of *pageState.
+type pageStatePages []*pageState
 
-	if fi.bundleTp == bundleLeaf && len(parts) > 0 {
-		// my-section/mybundle/index.md => my-section
-		return parts[:len(parts)-1]
-	}
+// Implement sort.Interface, ordered by Hugo's default page sort.
+func (ps pageStatePages) Len() int { return len(ps) }
 
-	return parts
-}
+func (ps pageStatePages) Less(i, j int) bool { return page.DefaultPageSort(ps[i], ps[j]) }
+
+func (ps pageStatePages) Swap(i, j int) { ps[i], ps[j] = ps[j], ps[i] }
 
-func kindFromFileInfo(fi *fileInfo) string {
-	if fi.TranslationBaseName() == "_index" {
-		if fi.Dir() == "" {
-			return KindHome
+// findPagePos returns the position of the given page in ps, matched by
+// source filename, or -1 if not found.
+func (ps pageStatePages) findPagePos(page *pageState) int {
+	for i, x := range ps {
+		if x.File().Filename() == page.File().Filename() {
+			return i
 		}
-		// Could be index for section, taxonomy, taxonomy term
-		// We don't know enough yet to determine which
-		return kindUnknown
 	}
-	return KindPage
+	return -1
 }
 
-func (p *Page) sectionsPath() string {
-	if len(p.sections) == 0 {
-		return ""
-	}
-	if len(p.sections) == 1 {
-		return p.sections[0]
+// findPagePosByFilename returns the position of the page with the given
+// source filename in ps, or -1 if not found.
+func (ps pageStatePages) findPagePosByFilename(filename string) int {
+	for i, x := range ps {
+		if x.File().Filename() == filename {
+			return i
+		}
 	}
-
-	return path.Join(p.sections...)
+	return -1
 }
 
-func (p *Page) kindFromSections() string {
-	if len(p.sections) == 0 || len(p.s.Taxonomies) == 0 {
-		return KindSection
+// findPagePosByFilnamePrefix returns the position of the page whose source
+// filename is the closest match (shortest filename) for the given prefix,
+// or -1 if no filename starts with prefix or prefix is empty.
+// NOTE(review): "Filname" in the name is a typo; renaming would touch all
+// call sites, so it is left as-is here.
+func (ps pageStatePages) findPagePosByFilnamePrefix(prefix string) int {
+	if prefix == "" {
+		return -1
+	}
 
-	sectionPath := p.sectionsPath()
-
-	for k, _ := range p.s.Taxonomies {
-		if k == sectionPath {
-			return KindTaxonomyTerm
-		}
+	lenDiff := -1
+	currPos := -1
+	prefixLen := len(prefix)
 
-		if strings.HasPrefix(sectionPath, k) {
-			return KindTaxonomy
+	// Find the closest match
+	for i, x := range ps {
+		if strings.HasPrefix(x.File().Filename(), prefix) {
+			diff := len(x.File().Filename()) - prefixLen
+			if lenDiff == -1 || diff < lenDiff {
+				lenDiff = diff
+				currPos = i
+			}
 		}
 	}
-
-	return KindSection
+	return currPos
 }
 
-func (p *Page) setValuesForKind(s *Site) {
-	if p.Kind == kindUnknown {
-		// This is either a taxonomy list, taxonomy term or a section
-		nodeType := p.kindFromSections()
-
-		if nodeType == kindUnknown {
-			panic(fmt.Sprintf("Unable to determine page kind from %q", p.sections))
-		}
-
-		p.Kind = nodeType
+// content returns the rendered content of c as a string, panicking on any
+// error. Intended for internal/test use where content must be available.
+func content(c resource.ContentProvider) string {
+	cc, err := c.Content()
+	if err != nil {
+		panic(err)
 	}
 
-	switch p.Kind {
-	case KindHome:
-		p.URLPath.URL = "/"
-	case KindPage:
-	default:
-		if p.URLPath.URL == "" {
-			p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
-		}
+	ccs, err := cast.ToStringE(cc)
+	if err != nil {
+		panic(err)
 	}
+	return ccs
 }
 
-// Used in error logs.
-func (p *Page) pathOrTitle() string {
-	if p.Filename() != "" {
-		return p.Filename()
+// sectionsFromFile derives the section path elements for fi from its
+// directory. For leaf bundles (index.md) the bundle directory itself is not
+// a section, so the last element is dropped.
+func (s *Site) sectionsFromFile(fi source.File) []string {
+	dirname := fi.Dir()
+	dirname = strings.Trim(dirname, helpers.FilePathSeparator)
+	if dirname == "" {
+		return nil
+	}
+	parts := strings.Split(dirname, helpers.FilePathSeparator)
+
+	if fii, ok := fi.(*fileInfo); ok {
+		if fii.bundleTp == bundleLeaf && len(parts) > 0 {
+			// my-section/mybundle/index.md => my-section
+			return parts[:len(parts)-1]
+		}
 	}
-	return p.title
-}
 
-func (p *Page) Next() *Page {
-	// TODO Remove the deprecation notice (but keep PrevPage as an alias) Hugo 0.52
-	helpers.Deprecated("Page", ".Next", "Use .PrevPage (yes, not .NextPage).", false)
-	return p.PrevPage
+	return parts
 }
 
-func (p *Page) Prev() *Page {
-	// TODO Remove the deprecation notice (but keep NextPage as an alias) Hugo 0.52
-	helpers.Deprecated("Page", ".Prev", "Use .NextPage (yes, not .PrevPage).", false)
-	return p.NextPage
+// printStackTrace returns a stack trace of all goroutines, truncated to at
+// most length bytes. Debug helper.
+// NOTE(review): runtime.Stack returns the number of bytes written; that
+// value is ignored here, so the returned string is padded with NUL bytes up
+// to length — should be string(trace[:n]). Confirm and fix upstream.
+func printStackTrace(length int) string {
+	trace := make([]byte, length)
+	runtime.Stack(trace, true)
+	return string(trace)
 }
diff --git a/hugolib/page__common.go b/hugolib/page__common.go
new file mode 100644
index 00000000000..5bd7223cc2d
--- /dev/null
+++ b/hugolib/page__common.go
@@ -0,0 +1,112 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"sync"
+
+	"github.com/bep/gitmap"
+	"github.com/gohugoio/hugo/common/maps"
+	"github.com/gohugoio/hugo/compare"
+	"github.com/gohugoio/hugo/lazy"
+	"github.com/gohugoio/hugo/navigation"
+	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+// pageCommon holds the state and embedded providers shared by all page
+// kinds. Most of the page.Page interface is satisfied by the embedded
+// provider interfaces below, which are wired up at construction time.
+type pageCommon struct {
+	s *Site
+	m *pageMeta
+
+	// Lazily initialized dependencies.
+	init *lazy.Init
+
+	// All of these represent the common parts of a page.Page
+	maps.Scratcher
+	navigation.PageMenusProvider
+	page.AuthorProvider
+	page.PageRenderProvider
+	page.AlternativeOutputFormatsProvider
+	page.ChildCareProvider
+	page.FileProvider
+	page.GetPageProvider
+	page.GitInfoProvider
+	page.InSectionPositioner
+	page.OutputFormatsProvider
+	page.PageMetaProvider
+	page.Positioner
+	page.RawContentProvider
+	page.RelatedKeywordsProvider
+	page.RefProvider
+	page.ShortcodeInfoProvider
+	page.SitesProvider
+	page.DeprecatedWarningPageMethods
+	page.TranslationsProvider
+	page.TreeProvider
+	resource.LanguageProvider
+	resource.ResourceDataProvider
+	resource.ResourceMetaProvider
+	resource.ResourceParamsProvider
+	resource.ResourceTypesProvider
+	resource.TranslationKeyProvider
+	compare.Eqer
+
+	// Describes how paths and URLs for this page and its descendants
+	// should look.
+	targetPathDescriptor page.TargetPathDescriptor
+
+	layoutDescriptor     output.LayoutDescriptor
+	layoutDescriptorInit sync.Once
+
+	// The parsed page content.
+	pageContent
+
+	// Set if feature enabled and this is in a Git repo.
+	gitInfo *gitmap.GitInfo
+
+	// Positional navigation
+	posNextPrev        *nextPrev
+	posNextPrevSection *nextPrev
+
+	// Menus
+	pageMenus *pageMenus
+
+	// Internal use
+	page.InternalDependencies
+
+	// The children. Regular pages will have none.
+	pages     page.Pages
+	pagesInit sync.Once
+
+	// Any bundled resources
+	resources     resource.Resources
+	resourcesInit sync.Once
+
+	translations    page.Pages
+	allTranslations page.Pages
+
+	// Calculated and cached translation mapping key
+	translationKey     string
+	translationKeyInit sync.Once
+
+	// Will only be set for sections and regular pages.
+	parent *pageState
+
+	// Will only be set for section pages and the home page.
+	subSections page.Pages
+
+	// Set in fast render mode to force render a given page.
+	forceRender bool
+}
diff --git a/hugolib/page__data.go b/hugolib/page__data.go
new file mode 100644
index 00000000000..79a64931b4a
--- /dev/null
+++ b/hugolib/page__data.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"sync"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+// pageData decorates a pageState with its lazily built .Data template map
+// (taxonomy terms, singular/plural names etc. for list pages).
+type pageData struct {
+	*pageState
+
+	// dataInit guards the one-time construction of data.
+	dataInit sync.Once
+	data     page.Data
+}
+
+// Data returns the page's .Data map. For regular pages it is empty; for
+// taxonomy and taxonomy-term pages it is populated with the relevant
+// taxonomy entries. Built once, on first access.
+func (p *pageData) Data() interface{} {
+	p.dataInit.Do(func() {
+		p.data = make(page.Data)
+
+		if p.Kind() == page.KindPage {
+			return
+		}
+
+		switch p.Kind() {
+		case page.KindTaxonomy:
+			termInfo := p.getTaxonomyNodeInfo()
+			pluralInfo := termInfo.parent
+
+			singular := pluralInfo.singular
+			plural := pluralInfo.plural
+			term := termInfo.term
+			taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)
+
+			p.data[singular] = taxonomy
+			p.data["Singular"] = singular
+			p.data["Plural"] = plural
+			p.data["Term"] = term
+		case page.KindTaxonomyTerm:
+			info := p.getTaxonomyNodeInfo()
+			plural := info.plural
+			singular := info.singular
+
+			p.data["Singular"] = singular
+			p.data["Plural"] = plural
+			p.data["Terms"] = p.s.Taxonomies[plural]
+			// keep the following just for legacy reasons
+			p.data["OrderedIndex"] = p.data["Terms"]
+			p.data["Index"] = p.data["Terms"]
+		}
+
+		// Assign the function to the map to make sure it is lazily initialized
+		p.data["pages"] = p.Pages
+
+	})
+
+	return p.data
+}
diff --git a/hugolib/page__menus.go b/hugolib/page__menus.go
new file mode 100644
index 00000000000..0c9616a6d45
--- /dev/null
+++ b/hugolib/page__menus.go
@@ -0,0 +1,74 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"sync"
+
+	"github.com/gohugoio/hugo/navigation"
+)
+
+// pageMenus implements the page menu methods, lazily building the page's
+// menu entries and the query provider on first use. Menu queries first make
+// sure the site-wide menus are initialized.
+type pageMenus struct {
+	p *pageState
+
+	// NOTE(review): MenyQueryProvider is misspelled in the navigation
+	// package; renaming requires a change there.
+	q navigation.MenyQueryProvider
+
+	pmInit sync.Once
+	pm     navigation.PageMenus
+}
+
+// HasMenuCurrent reports whether this page is an ancestor of me in menuID.
+func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+	p.p.s.init.menus.Do()
+	p.init()
+	return p.q.HasMenuCurrent(menuID, me)
+}
+
+// IsMenuCurrent reports whether this page is the page of inme in menuID.
+func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+	p.p.s.init.menus.Do()
+	p.init()
+	return p.q.IsMenuCurrent(menuID, inme)
+}
+
+// Menus returns the menus this page is a member of.
+func (p *pageMenus) Menus() navigation.PageMenus {
+	// There is a reverse dependency here. initMenus will, once, build the
+	// site menus and update any relevant page.
+	p.p.s.init.menus.Do()
+
+	return p.menus()
+}
+
+// menus returns the page menus without triggering site menu init.
+func (p *pageMenus) menus() navigation.PageMenus {
+	p.init()
+	return p.pm
+
+}
+
+// init lazily builds the menu query provider and the page's own menu
+// entries from front matter. Errors are logged, not returned.
+func (p *pageMenus) init() {
+	p.pmInit.Do(func() {
+		p.q = navigation.NewMenuQueryProvider(
+			p.p.s.Info.sectionPagesMenu,
+			p,
+			p.p.s,
+			p.p,
+		)
+
+		var err error
+		p.pm, err = navigation.PageMenusFromPage(p.p)
+		if err != nil {
+			p.p.s.Log.ERROR.Println(p.p.wrapError(err))
+		}
+
+	})
+
+}
diff --git a/hugolib/page__meta.go b/hugolib/page__meta.go
new file mode 100644
index 00000000000..8532f5016b3
--- /dev/null
+++ b/hugolib/page__meta.go
@@ -0,0 +1,652 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"path"
+	"regexp"
+	"strings"
+	"time"
+
+	"github.com/gohugoio/hugo/related"
+
+	"github.com/gohugoio/hugo/source"
+	"github.com/markbates/inflect"
+	"github.com/mitchellh/mapstructure"
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/common/maps"
+	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/helpers"
+
+	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/page/pagemeta"
+	"github.com/gohugoio/hugo/resources/resource"
+	"github.com/spf13/cast"
+)
+
+// cjkRe matches any CJK rune; used to auto-detect CJK content for word
+// counting when hasCJKLanguage is enabled.
+var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
+
+// pageMeta holds a page's metadata, mostly derived from front matter and
+// the source file. It backs most of the page.PageMetaProvider methods.
+type pageMeta struct {
+	// kind is the discriminator that identifies the different page types
+	// in the different page collections. This can, as an example, be used
+	// to filter regular pages, find sections etc.
+	// Kind will, for the pages available to the templates, be one of:
+	// page, home, section, taxonomy and taxonomyTerm.
+	// It is of string type to make it easy to reason about in
+	// the templates.
+	kind string
+
+	// This is a standalone page not part of any page collection. These
+	// include sitemap, robotsTXT and similar. It will have no pageOutputs, but
+	// a fixed pageOutput.
+	standalone bool
+
+	bundleType string
+
+	// Params contains configuration defined in the params section of page frontmatter.
+	params map[string]interface{}
+
+	title     string
+	linkTitle string
+
+	resourcePath string
+
+	weight int
+
+	markup      string
+	contentType string
+
+	// whether the content is in a CJK language.
+	isCJKLanguage bool
+
+	layout string
+
+	aliases []string
+
+	draft bool
+
+	description string
+	keywords    []string
+
+	urlPaths pagemeta.URLPath
+
+	resource.Dates
+
+	// This is enabled if it is a leaf bundle (the "index.md" type) and it is marked as headless in front matter.
+	// Being headless means that
+	// 1. The page itself is not rendered to disk
+	// 2. It is not available in .Site.Pages etc.
+	// 3. But you can get it via .Site.GetPage
+	headless bool
+
+	// A key that maps to translation(s) of this page. This value is fetched
+	// from the page front matter.
+	translationKey string
+
+	// From front matter.
+	configuredOutputFormats output.Formats
+
+	// This is the raw front matter metadata that is going to be assigned to
+	// the Resources above.
+	resourcesMetadata []map[string]interface{}
+
+	f source.File
+
+	sections []string
+
+	// Sitemap overrides from front matter.
+	sitemap config.Sitemap
+
+	s *Site
+
+	renderingConfig *helpers.BlackFriday
+}
+
+func (p *pageMeta) Aliases() []string {
+	return p.aliases
+}
+
+// Author returns the first of the page's authors, or a zero Author if none.
+func (p *pageMeta) Author() page.Author {
+	authors := p.Authors()
+
+	for _, author := range authors {
+		return author
+	}
+	return page.Author{}
+}
+
+// Authors resolves the "authors" front matter keys against the site's
+// configured authors.
+// NOTE(review): the type assertion below panics if params["authors"] is not
+// a []string — confirm upstream normalization.
+func (p *pageMeta) Authors() page.AuthorList {
+	authorKeys, ok := p.params["authors"]
+	if !ok {
+		return page.AuthorList{}
+	}
+	authors := authorKeys.([]string)
+	if len(authors) < 1 || len(p.s.Info.Authors) < 1 {
+		return page.AuthorList{}
+	}
+
+	al := make(page.AuthorList)
+	for _, author := range authors {
+		a, ok := p.s.Info.Authors[author]
+		if ok {
+			al[author] = a
+		}
+	}
+	return al
+}
+
+func (p *pageMeta) BundleType() string {
+	return p.bundleType
+}
+
+func (p *pageMeta) Description() string {
+	return p.description
+}
+
+// Lang delegates to the site's language.
+func (p *pageMeta) Lang() string {
+	return p.s.Lang()
+}
+
+func (p *pageMeta) Draft() bool {
+	return p.draft
+}
+
+// File returns the backing source file; nil for file-less pages.
+func (p *pageMeta) File() source.File {
+	return p.f
+}
+
+func (p *pageMeta) IsHome() bool {
+	return p.Kind() == page.KindHome
+}
+
+func (p *pageMeta) Keywords() []string {
+	return p.keywords
+}
+
+func (p *pageMeta) Kind() string {
+	return p.kind
+}
+
+func (p *pageMeta) Layout() string {
+	return p.layout
+}
+
+// LinkTitle returns the front matter linkTitle, falling back to Title.
+func (p *pageMeta) LinkTitle() string {
+	if p.linkTitle != "" {
+		return p.linkTitle
+	}
+
+	return p.Title()
+}
+
+// Name returns the resource path if set (bundled page resources), else the title.
+func (p *pageMeta) Name() string {
+	if p.resourcePath != "" {
+		return p.resourcePath
+	}
+	return p.Title()
+}
+
+// IsNode reports whether this is a list-type page (anything but a regular page).
+func (p *pageMeta) IsNode() bool {
+	return !p.IsPage()
+}
+
+func (p *pageMeta) IsPage() bool {
+	return p.Kind() == page.KindPage
+}
+
+// Param is a convenience method to do lookups in Page's and Site's Params map,
+// in that order.
+//
+// This method is also implemented on SiteInfo.
+// TODO(bep) interface
+func (p *pageMeta) Param(key interface{}) (interface{}, error) {
+	return resource.Param(p, p.s.Info.Params(), key)
+}
+
+func (p *pageMeta) Params() map[string]interface{} {
+	return p.params
+}
+
+// Path returns the source file path, falling back to the virtual sections path.
+func (p *pageMeta) Path() string {
+	if p.File() != nil {
+		return p.File().Path()
+	}
+	return p.SectionsPath()
+}
+
+// RelatedKeywords implements the related.Document interface needed for fast page searches.
+func (p *pageMeta) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+
+	v, err := p.Param(cfg.Name)
+	if err != nil {
+		return nil, err
+	}
+
+	return cfg.ToKeywords(v)
+}
+
+func (p *pageMeta) IsSection() bool {
+	return p.Kind() == page.KindSection
+}
+
+// Section returns the first-level content section this page belongs to.
+func (p *pageMeta) Section() string {
+	if p.IsHome() {
+		return ""
+	}
+
+	if p.IsNode() {
+		if len(p.sections) == 0 {
+			// May be a sitemap or similar.
+			return ""
+		}
+		return p.sections[0]
+	}
+
+	if p.File() != nil {
+		return p.File().Section()
+	}
+
+	panic("invalid page state")
+
+}
+
+func (p *pageMeta) SectionsEntries() []string {
+	return p.sections
+}
+
+// SectionsPath returns the sections joined with "/".
+func (p *pageMeta) SectionsPath() string {
+	return path.Join(p.SectionsEntries()...)
+}
+
+func (p *pageMeta) Sitemap() config.Sitemap {
+	return p.sitemap
+}
+
+func (p *pageMeta) Title() string {
+	return p.title
+}
+
+// Type returns the content type: front matter "type", else the section,
+// else "page".
+func (p *pageMeta) Type() string {
+	if p.contentType != "" {
+		return p.contentType
+	}
+
+	if x := p.Section(); x != "" {
+		return x
+	}
+
+	return "page"
+}
+
+func (p *pageMeta) Weight() int {
+	return p.weight
+}
+
+// setMetadata populates pm from the page's front matter map: dates are
+// delegated to the frontmatter handler, known keys are decoded into typed
+// fields, and everything (known and unknown) is mirrored into pm.params
+// with lowercase keys for case-insensitive template lookups.
+func (pm *pageMeta) setMetadata(p *pageState, frontmatter map[string]interface{}) error {
+	if frontmatter == nil {
+		return errors.New("missing frontmatter data")
+	}
+
+	pm.params = make(map[string]interface{})
+
+	// Needed for case insensitive fetching of params values
+	maps.ToLower(frontmatter)
+
+	var mtime time.Time
+	if p.File().FileInfo() != nil {
+		mtime = p.File().FileInfo().ModTime()
+	}
+
+	var gitAuthorDate time.Time
+	if p.gitInfo != nil {
+		gitAuthorDate = p.gitInfo.AuthorDate
+	}
+
+	descriptor := &pagemeta.FrontMatterDescriptor{
+		Frontmatter:   frontmatter,
+		Params:        pm.params,
+		Dates:         &pm.Dates,
+		PageURLs:      &pm.urlPaths,
+		BaseFilename:  p.File().ContentBaseName(),
+		ModTime:       mtime,
+		GitAuthorDate: gitAuthorDate,
+	}
+
+	// Handle the date separately
+	// TODO(bep) we need to "do more" in this area so this can be split up and
+	// more easily tested without the Page, but the coupling is strong.
+	err := pm.s.frontmatterHandler.HandleDates(descriptor)
+	if err != nil {
+		p.s.Log.ERROR.Printf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
+	}
+
+	var sitemapSet bool
+
+	var draft, published, isCJKLanguage *bool
+	for k, v := range frontmatter {
+		loki := strings.ToLower(k)
+
+		if loki == "published" { // Intentionally undocumented
+			vv, err := cast.ToBoolE(v)
+			if err == nil {
+				published = &vv
+			}
+			// published may also be a date
+			continue
+		}
+
+		if pm.s.frontmatterHandler.IsDateKey(loki) {
+			continue
+		}
+
+		switch loki {
+		case "title":
+			pm.title = cast.ToString(v)
+			pm.params[loki] = pm.title
+		case "linktitle":
+			pm.linkTitle = cast.ToString(v)
+			pm.params[loki] = pm.linkTitle
+		case "description":
+			pm.description = cast.ToString(v)
+			pm.params[loki] = pm.description
+		case "slug":
+			// Don't start or end with a -
+			pm.urlPaths.Slug = strings.Trim(cast.ToString(v), "-")
+			pm.params[loki] = pm.Slug()
+		case "url":
+			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
+				return fmt.Errorf("only relative URLs are supported, %v provided", url)
+			}
+			pm.urlPaths.URL = cast.ToString(v)
+			pm.params[loki] = pm.urlPaths.URL
+		case "type":
+			pm.contentType = cast.ToString(v)
+			pm.params[loki] = pm.contentType
+		case "keywords":
+			pm.keywords = cast.ToStringSlice(v)
+			pm.params[loki] = pm.keywords
+		case "headless":
+			// For now, only the leaf bundles ("index.md") can be headless (i.e. produce no output).
+			// We may expand on this in the future, but that gets more complex pretty fast.
+			if p.File().TranslationBaseName() == "index" {
+				pm.headless = cast.ToBool(v)
+			}
+			pm.params[loki] = pm.headless
+		case "outputs":
+			o := cast.ToStringSlice(v)
+			if len(o) > 0 {
+				// Output formats are explicitly set in front matter, use those.
+				outFormats, err := p.s.outputFormatsConfig.GetByNames(o...)
+
+				if err != nil {
+					p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
+				} else {
+					pm.configuredOutputFormats = outFormats
+					pm.params[loki] = outFormats
+				}
+
+			}
+		case "draft":
+			draft = new(bool)
+			*draft = cast.ToBool(v)
+		case "layout":
+			pm.layout = cast.ToString(v)
+			pm.params[loki] = pm.layout
+		case "markup":
+			pm.markup = cast.ToString(v)
+			pm.params[loki] = pm.markup
+		case "weight":
+			pm.weight = cast.ToInt(v)
+			pm.params[loki] = pm.weight
+		case "aliases":
+			pm.aliases = cast.ToStringSlice(v)
+			for _, alias := range pm.aliases {
+				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
+					return fmt.Errorf("only relative aliases are supported, %v provided", alias)
+				}
+			}
+			pm.params[loki] = pm.aliases
+		case "sitemap":
+			p.m.sitemap = config.DecodeSitemap(p.s.siteCfg.sitemap, cast.ToStringMap(v))
+			pm.params[loki] = p.m.sitemap
+			sitemapSet = true
+		case "iscjklanguage":
+			isCJKLanguage = new(bool)
+			*isCJKLanguage = cast.ToBool(v)
+		case "translationkey":
+			pm.translationKey = cast.ToString(v)
+			pm.params[loki] = pm.translationKey
+		case "resources":
+			var resources []map[string]interface{}
+			handled := true
+
+			switch vv := v.(type) {
+			case []map[interface{}]interface{}:
+				for _, vvv := range vv {
+					resources = append(resources, cast.ToStringMap(vvv))
+				}
+			case []map[string]interface{}:
+				resources = append(resources, vv...)
+			case []interface{}:
+				for _, vvv := range vv {
+					switch vvvv := vvv.(type) {
+					case map[interface{}]interface{}:
+						resources = append(resources, cast.ToStringMap(vvvv))
+					case map[string]interface{}:
+						resources = append(resources, vvvv)
+					}
+				}
+			default:
+				handled = false
+			}
+
+			if handled {
+				pm.params[loki] = resources
+				pm.resourcesMetadata = resources
+				break
+			}
+			fallthrough
+
+		default:
+			// If not one of the explicit values, store in Params
+			switch vv := v.(type) {
+			case bool:
+				pm.params[loki] = vv
+			case string:
+				pm.params[loki] = vv
+			case int64, int32, int16, int8, int:
+				pm.params[loki] = vv
+			case float64, float32:
+				pm.params[loki] = vv
+			case time.Time:
+				pm.params[loki] = vv
+			default: // handle array of strings as well
+				switch vvv := vv.(type) {
+				case []interface{}:
+					if len(vvv) > 0 {
+						switch vvv[0].(type) {
+						case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
+							pm.params[loki] = vvv
+						case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
+							pm.params[loki] = vvv
+						case []interface{}:
+							pm.params[loki] = vvv
+						default:
+							a := make([]string, len(vvv))
+							for i, u := range vvv {
+								a[i] = cast.ToString(u)
+							}
+
+							pm.params[loki] = a
+						}
+					} else {
+						pm.params[loki] = []string{}
+					}
+				default:
+					pm.params[loki] = vv
+				}
+			}
+		}
+	}
+
+	if !sitemapSet {
+		pm.sitemap = p.s.siteCfg.sitemap
+	}
+
+	pm.markup = helpers.GuessType(pm.markup)
+
+	if draft != nil && published != nil {
+		pm.draft = *draft
+		p.m.s.Log.WARN.Printf("page %q has both draft and published settings in its frontmatter. Using draft.", p.File().Filename())
+	} else if draft != nil {
+		pm.draft = *draft
+	} else if published != nil {
+		pm.draft = !*published
+	}
+	pm.params["draft"] = pm.draft
+
+	if isCJKLanguage != nil {
+		pm.isCJKLanguage = *isCJKLanguage
+	} else if p.s.siteCfg.hasCJKLanguage {
+		if cjkRe.Match(p.source.parsed.Input()) {
+			pm.isCJKLanguage = true
+		} else {
+			pm.isCJKLanguage = false
+		}
+	}
+
+	pm.params["iscjklanguage"] = p.m.isCJKLanguage
+
+	return nil
+}
+
+// applyDefaultValues fills in defaults not set by front matter: markup type
+// (guessed from the file extension), kind-specific titles, bundle type, and
+// the page-level Blackfriday rendering config.
+func (p *pageMeta) applyDefaultValues() error {
+	if p.markup == "" {
+		if p.File() != nil {
+			// Fall back to the file extension
+			p.markup = helpers.GuessType(p.File().Ext())
+		}
+		if p.markup == "" {
+			p.markup = "unknown"
+		}
+	}
+
+	if p.title == "" {
+		switch p.Kind() {
+		case page.KindHome:
+			p.title = p.s.Info.title
+		case page.KindSection:
+			sectionName := helpers.FirstUpper(p.sections[0])
+			if p.s.Cfg.GetBool("pluralizeListTitles") {
+				p.title = inflect.Pluralize(sectionName)
+			} else {
+				p.title = sectionName
+			}
+		case page.KindTaxonomy:
+			key := p.sections[len(p.sections)-1]
+			p.title = strings.Replace(p.s.titleFunc(key), "-", " ", -1)
+		case page.KindTaxonomyTerm:
+			p.title = p.s.titleFunc(p.sections[0])
+		case kind404:
+			p.title = "404 Page not found"
+
+		}
+	}
+
+	if p.IsNode() {
+		p.bundleType = "branch"
+	} else {
+		source := p.File()
+		if fi, ok := source.(*fileInfo); ok {
+			switch fi.bundleTp {
+			case bundleBranch:
+				p.bundleType = "branch"
+			case bundleLeaf:
+				p.bundleType = "leaf"
+			}
+		}
+	}
+
+	bfParam := getParamToLower(p, "blackfriday")
+	if bfParam != nil {
+		// NOTE(review): this assignment is immediately overwritten by the
+		// copy below; it appears redundant — confirm and remove.
+		p.renderingConfig = p.s.ContentSpec.BlackFriday
+
+		// Create a copy so we can modify it.
+		bf := *p.s.ContentSpec.BlackFriday
+		p.renderingConfig = &bf
+		pageParam := cast.ToStringMap(bfParam)
+		if err := mapstructure.Decode(pageParam, &p.renderingConfig); err != nil {
+			return errors.WithMessage(err, "failed to decode rendering config")
+		}
+	}
+
+	return nil
+
+}
+
+// outputFormats returns the output formats this page will be rendered to:
+// front matter overrides if set, else the site defaults for the page's kind.
+func (m *pageMeta) outputFormats() output.Formats {
+	if len(m.configuredOutputFormats) > 0 {
+		return m.configuredOutputFormats
+	}
+
+	return m.s.outputFormats[m.Kind()]
+}
+
+// Slug returns the URL slug set in front matter, if any.
+func (p *pageMeta) Slug() string {
+	return p.urlPaths.Slug
+}
+
+// getParam fetches key (case-insensitively) from m's Params, normalizing
+// numbers to int/float64 and optionally lowercasing string values. Returns
+// nil for missing keys and for unsupported value types.
+func getParam(m resource.ResourceParamsProvider, key string, stringToLower bool) interface{} {
+	v := m.Params()[strings.ToLower(key)]
+
+	if v == nil {
+		return nil
+	}
+
+	switch val := v.(type) {
+	case bool:
+		return val
+	case string:
+		if stringToLower {
+			return strings.ToLower(val)
+		}
+		return val
+	case int64, int32, int16, int8, int:
+		return cast.ToInt(v)
+	case float64, float32:
+		return cast.ToFloat64(v)
+	case time.Time:
+		return val
+	case []string:
+		if stringToLower {
+			return helpers.SliceToLower(val)
+		}
+		return v
+	case map[string]interface{}: // JSON and TOML
+		return v
+	case map[interface{}]interface{}: // YAML
+		return v
+	}
+
+	//p.s.Log.ERROR.Printf("GetParam(\"%s\"): Unknown type %s\n", key, reflect.TypeOf(v))
+	return nil
+}
+
+// getParamToLower is getParam with string values lowercased.
+func getParamToLower(m resource.ResourceParamsProvider, key string) interface{} {
+	return getParam(m, key, true)
+}
diff --git a/hugolib/page__new.go b/hugolib/page__new.go
new file mode 100644
index 00000000000..0f419b5daca
--- /dev/null
+++ b/hugolib/page__new.go
@@ -0,0 +1,291 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"html/template"
+	"strings"
+
+	"github.com/gohugoio/hugo/common/hugo"
+
+	"github.com/gohugoio/hugo/common/maps"
+	"github.com/gohugoio/hugo/source"
+
+	"github.com/gohugoio/hugo/parser/pageparser"
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/output"
+
+	"github.com/gohugoio/hugo/lazy"
+
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+// newPageBase creates a pageState with all the common, format
+// independent provider interfaces wired up. The output format specific
+// providers are set to no-ops here; they are installed later (see
+// newPageFromMeta and newPageWithContent). Panics if metaProvider has
+// no Site set.
+func newPageBase(metaProvider *pageMeta) (*pageState, error) {
+	if metaProvider.s == nil {
+		panic("must provide a Site")
+	}
+
+	s := metaProvider.s
+
+	ps := &pageState{
+		pageOutput: nopPageOutput,
+		pageCommon: &pageCommon{
+			FileProvider:            metaProvider,
+			AuthorProvider:          metaProvider,
+			Scratcher:               maps.NewScratcher(),
+			Positioner:              page.NopPage,
+			InSectionPositioner:     page.NopPage,
+			ResourceMetaProvider:    metaProvider,
+			ResourceParamsProvider:  metaProvider,
+			PageMetaProvider:        metaProvider,
+			RelatedKeywordsProvider: metaProvider,
+			OutputFormatsProvider:   page.NopPage,
+			ResourceTypesProvider:   pageTypesProvider,
+			RefProvider:             page.NopPage,
+			ShortcodeInfoProvider:   page.NopPage,
+			LanguageProvider:        s,
+
+			InternalDependencies: s,
+			init:                 lazy.New(),
+			m:                    metaProvider,
+			s:                    s},
+	}
+
+	siteAdapter := pageSiteAdapter{s: s, p: ps}
+
+	// The deprecated Page methods are served by a dedicated wrapper type.
+	deprecatedWarningPage := struct {
+		source.FileWithoutOverlap
+		page.DeprecatedWarningPageMethods1
+	}{
+		FileWithoutOverlap:            metaProvider.File(),
+		DeprecatedWarningPageMethods1: &pageDeprecatedWarning{p: ps},
+	}
+
+	ps.DeprecatedWarningPageMethods = page.NewDeprecatedWarningPage(deprecatedWarningPage)
+	ps.pageMenus = &pageMenus{p: ps}
+	ps.PageMenusProvider = ps.pageMenus
+	ps.GetPageProvider = siteAdapter
+	ps.GitInfoProvider = ps
+	ps.TranslationsProvider = ps
+	ps.ResourceDataProvider = &pageData{pageState: ps}
+	ps.RawContentProvider = ps
+	ps.ChildCareProvider = ps
+	ps.TreeProvider = pageTree{p: ps}
+	ps.Eqer = ps
+	ps.TranslationKeyProvider = ps
+	ps.ShortcodeInfoProvider = ps
+	ps.PageRenderProvider = ps
+	ps.AlternativeOutputFormatsProvider = ps
+
+	return ps, nil
+
+}
+
+// newPageFromMeta creates a pageState from metadata alone, with no
+// content source. The per-output-format state is set up lazily on the
+// page's init, so it is only built if the page is actually used.
+func newPageFromMeta(metaProvider *pageMeta) (*pageState, error) {
+	ps, err := newPageBase(metaProvider)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := metaProvider.applyDefaultValues(); err != nil {
+		return nil, err
+	}
+
+	ps.init.Add(func() (interface{}, error) {
+		pp, err := newPagePaths(metaProvider.s, ps, metaProvider)
+		if err != nil {
+			return nil, err
+		}
+
+		makeOut := func(f output.Format, render bool) *pageOutput {
+			return newPageOutput(nil, ps, pp, f, render)
+		}
+
+		if ps.m.standalone {
+			// Standalone pages have exactly one output format.
+			ps.pageOutput = makeOut(ps.m.outputFormats()[0], true)
+		} else {
+			// One pageOutput per site render format, shared between
+			// formats with the same name. Formats this page isn't
+			// configured for are created with render=false.
+			ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
+			created := make(map[string]*pageOutput)
+			outputFormatsForPage := ps.m.outputFormats()
+			for i, f := range ps.s.h.renderFormats {
+				po, found := created[f.Name]
+				if !found {
+					_, shouldRender := outputFormatsForPage.GetByName(f.Name)
+					po = makeOut(f, shouldRender)
+					created[f.Name] = po
+				}
+				ps.pageOutputs[i] = po
+			}
+		}
+
+		if err := ps.initCommonProviders(pp); err != nil {
+			return nil, err
+		}
+
+		return nil, nil
+
+	})
+
+	// err is necessarily nil at this point; return nil explicitly
+	// instead of the stale variable.
+	return ps, nil
+
+}
+
+// newPageStandalone creates a fully initialized page with the single
+// given output format. Used by the legacy 404, sitemap and robots.txt
+// rendering.
+func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) {
+	m.standalone = true
+	m.configuredOutputFormats = output.Formats{f}
+
+	p, err := newPageFromMeta(m)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := p.initPage(); err != nil {
+		return nil, err
+	}
+
+	return p, nil
+}
+
+// newPageWithContent creates a pageState from a content file: it parses
+// the source, sets up shortcode handling, maps the parsed content into
+// the page meta, and defers the per-output-format content setup to the
+// page's lazy init.
+func newPageWithContent(f *fileInfo, s *Site, content resource.OpenReadSeekCloser) (*pageState, error) {
+	sections := s.sectionsFromFile(f)
+	kind := s.kindFromFileInfoOrSections(f, sections)
+	if kind == page.KindTaxonomy {
+		s.PathSpec.MakePathsSanitized(sections)
+	}
+
+	metaProvider := &pageMeta{kind: kind, sections: sections, s: s, f: f}
+
+	ps, err := newPageBase(metaProvider)
+	if err != nil {
+		return nil, err
+	}
+
+	gi, err := s.h.gitInfoForPage(ps)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to load Git data")
+	}
+	ps.gitInfo = gi
+
+	r, err := content()
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+
+	parseResult, err := pageparser.Parse(
+		r,
+		pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji},
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	// The byte positions start as -1 (unset); they are resolved when the
+	// content is mapped below.
+	ps.pageContent = pageContent{
+		source: rawPageContent{
+			parsed:         parseResult,
+			posMainContent: -1,
+			posSummaryEnd:  -1,
+			posBodyStart:   -1,
+		},
+	}
+
+	ps.shortcodeState = newShortcodeHandler(ps, ps.s, nil)
+
+	if err := ps.mapContent(metaProvider); err != nil {
+		return nil, ps.wrapError(err)
+	}
+
+	if err := metaProvider.applyDefaultValues(); err != nil {
+		return nil, err
+	}
+
+	ps.init.Add(func() (interface{}, error) {
+		// With no shortcodes the rendered content is format independent
+		// and can be shared between output formats (see below).
+		reuseContent := ps.renderable && !ps.shortcodeState.hasShortcodes()
+
+		// Creates what's needed for each output format.
+		contentPerOutput := newPageContentOutput(ps)
+
+		pp, err := newPagePaths(s, ps, metaProvider)
+		if err != nil {
+			return nil, err
+		}
+
+		// Prepare output formats for all sites.
+		ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
+		created := make(map[string]*pageOutput)
+		outputFormatsForPage := ps.m.outputFormats()
+
+		for i, f := range ps.s.h.renderFormats {
+			if po, found := created[f.Name]; found {
+				ps.pageOutputs[i] = po
+				continue
+			}
+
+			_, render := outputFormatsForPage.GetByName(f.Name)
+			var contentProvider *pageContentOutput
+			if reuseContent && i > 0 {
+				// Share the first format's content state.
+				contentProvider = ps.pageOutputs[0].cp
+			} else {
+				var err error
+				contentProvider, err = contentPerOutput(f)
+				if err != nil {
+					return nil, err
+				}
+			}
+
+			po := newPageOutput(contentProvider, ps, pp, f, render)
+			ps.pageOutputs[i] = po
+			created[f.Name] = po
+		}
+
+		if err := ps.initCommonProviders(pp); err != nil {
+			return nil, err
+		}
+
+		return nil, nil
+	})
+
+	return ps, nil
+}
+
+// pageDeprecatedWarning implements the deprecated Page methods; it is
+// wrapped via page.NewDeprecatedWarningPage so use of these methods can
+// be flagged.
+type pageDeprecatedWarning struct {
+	p *pageState
+}
+
+// IsDraft reports whether the page is marked as draft.
+func (d *pageDeprecatedWarning) IsDraft() bool { return d.p.m.draft }
+
+// Hugo returns the site's Hugo info.
+func (d *pageDeprecatedWarning) Hugo() hugo.Info { return d.p.s.Info.Hugo() }
+
+// LanguagePrefix returns the site's language prefix.
+func (d *pageDeprecatedWarning) LanguagePrefix() string { return d.p.s.Info.LanguagePrefix }
+
+// GetParam returns the page parameter for the given (case insensitive) key.
+func (d *pageDeprecatedWarning) GetParam(key string) interface{} {
+	return d.p.m.params[strings.ToLower(key)]
+}
+
+// RSSLink returns the permalink of the page's RSS output format, or the
+// empty string if the page has none.
+func (d *pageDeprecatedWarning) RSSLink() template.URL {
+	rss := d.p.OutputFormats().Get("RSS")
+	if rss == nil {
+		return ""
+	}
+	return template.URL(rss.Permalink())
+}
+
+// URL returns the URL set in front matter for regular pages, falling
+// back to the relative permalink.
+func (d *pageDeprecatedWarning) URL() string {
+	if u := d.p.m.urlPaths.URL; d.p.IsPage() && u != "" {
+		// This is the url set in front matter.
+		return u
+	}
+	// Fall back to the relative permalink.
+	return d.p.RelPermalink()
+}
diff --git a/hugolib/page__output.go b/hugolib/page__output.go
new file mode 100644
index 00000000000..d38d7c852dd
--- /dev/null
+++ b/hugolib/page__output.go
@@ -0,0 +1,107 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+// newPageOutput creates the pageOutput for ps in the output format f.
+// cp may be nil, in which case the content and ToC methods are backed
+// by no-op providers. render indicates whether this page is configured
+// to be rendered to f.
+func newPageOutput(
+	cp *pageContentOutput, // may be nil
+	ps *pageState,
+	pp pagePaths,
+	f output.Format,
+	render bool) *pageOutput {
+
+	var targetPathsProvider targetPathsHolder
+	var linksProvider resource.ResourceLinksProvider
+
+	ft, found := pp.targetPaths[f.Name]
+	if !found {
+		// Link to the main output format
+		ft = pp.targetPaths[pp.OutputFormats()[0].Format.Name]
+	}
+	targetPathsProvider = ft
+	linksProvider = ft
+
+	// Only rendered node pages get a real paginator.
+	var paginatorProvider page.PaginatorProvider = page.NopPage
+	var pag *pagePaginator
+
+	if render && ps.IsNode() {
+		pag = &pagePaginator{source: ps}
+		paginatorProvider = pag
+	}
+
+	var contentProvider page.ContentProvider = page.NopPage
+	var tableOfContentsProvider page.TableOfContentsProvider = page.NopPage
+
+	if cp != nil {
+		contentProvider = cp
+		tableOfContentsProvider = cp
+	}
+
+	// Bundle the per-output-format behavior into one value.
+	providers := struct {
+		page.ContentProvider
+		page.TableOfContentsProvider
+		page.PaginatorProvider
+		resource.ResourceLinksProvider
+		targetPather
+	}{
+		contentProvider,
+		tableOfContentsProvider,
+		paginatorProvider,
+		linksProvider,
+		targetPathsProvider,
+	}
+
+	po := &pageOutput{
+		f:                      f,
+		cp:                     cp,
+		pagePerOutputProviders: providers,
+		render:                 render,
+		paginator:              pag,
+	}
+
+	return po
+
+}
+
+// We create a pageOutput for every output format combination, even if this
+// particular page isn't configured to be rendered to that format.
+type pageOutput struct {
+	// Whether this page is configured to be rendered to this format.
+	render bool
+
+	f output.Format
+
+	// Only set if render is set.
+	// Note that this will be lazily initialized, so only used if actually
+	// used in template(s).
+	paginator *pagePaginator
+
+	// This interface provides the functionality that is specific for this
+	// output format.
+	pagePerOutputProviders
+
+	// This may be nil.
+	cp *pageContentOutput
+}
+
+// enablePlaceholders forwards the placeholder signal to the content
+// output, if any.
+func (p *pageOutput) enablePlaceholders() {
+	if p.cp != nil {
+		p.cp.enablePlaceholders()
+	}
+}
diff --git a/hugolib/page__paths.go b/hugolib/page__paths.go
new file mode 100644
index 00000000000..0a5dad5ef3c
--- /dev/null
+++ b/hugolib/page__paths.go
@@ -0,0 +1,148 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"net/url"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+// newPagePaths computes the output formats and the per-format target
+// paths and permalinks for the given page.
+func newPagePaths(
+	s *Site,
+	p page.Page,
+	pm *pageMeta) (pagePaths, error) {
+
+	targetPathDescriptor, err := createTargetPathDescriptor(s, p, pm)
+	if err != nil {
+		return pagePaths{}, err
+	}
+
+	outputFormats := pm.outputFormats()
+	if len(outputFormats) == 0 {
+		outputFormats = pm.s.outputFormats[pm.Kind()]
+	}
+
+	if len(outputFormats) == 0 {
+		return pagePaths{}, nil
+	}
+
+	// Headless pages get only the first output format, and no
+	// permalinks (see below).
+	if pm.headless {
+		outputFormats = outputFormats[:1]
+	}
+
+	pageOutputFormats := make(page.OutputFormats, len(outputFormats))
+	targets := make(map[string]targetPathsHolder)
+
+	for i, f := range outputFormats {
+		desc := targetPathDescriptor
+		desc.Type = f
+		paths := page.CreateTargetPaths(desc)
+
+		var relPermalink, permalink string
+
+		if !pm.headless {
+			relPermalink = paths.RelPermalink(s.PathSpec)
+			permalink = paths.PermalinkForOutputFormat(s.PathSpec, f)
+		}
+
+		pageOutputFormats[i] = page.NewOutputFormat(relPermalink, permalink, len(outputFormats) == 1, f)
+
+		// Use the main format for permalinks, usually HTML.
+		permalinksIndex := 0
+		if f.Permalinkable {
+			// Unless it's permalinkable
+			permalinksIndex = i
+		}
+
+		targets[f.Name] = targetPathsHolder{
+			paths:        paths,
+			OutputFormat: pageOutputFormats[permalinksIndex]}
+
+	}
+
+	return pagePaths{
+		outputFormats:        pageOutputFormats,
+		targetPaths:          targets,
+		targetPathDescriptor: targetPathDescriptor,
+	}, nil
+
+}
+
+// pagePaths holds a page's output formats and the computed target
+// paths, keyed by output format name.
+type pagePaths struct {
+	outputFormats page.OutputFormats
+
+	targetPaths          map[string]targetPathsHolder
+	targetPathDescriptor page.TargetPathDescriptor
+}
+
+// OutputFormats returns the page's output formats.
+func (l pagePaths) OutputFormats() page.OutputFormats {
+	return l.outputFormats
+}
+
+// createTargetPathDescriptor builds the page.TargetPathDescriptor used
+// to compute target file paths and permalinks for p. The output format
+// (desc.Type) is left unset; callers fill it in per format.
+func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.TargetPathDescriptor, error) {
+	var dir, baseName string
+	if f := p.File(); f != nil {
+		dir = f.Dir()
+		baseName = f.TranslationBaseName()
+	}
+
+	kind := p.Kind()
+	alwaysInSubDir := kind == kindSitemap
+
+	d := s.Deps
+
+	desc := page.TargetPathDescriptor{
+		PathSpec:    d.PathSpec,
+		Kind:        kind,
+		Sections:    p.SectionsEntries(),
+		UglyURLs:    s.Info.uglyURLs(p),
+		ForcePrefix: s.h.IsMultihost() || alwaysInSubDir,
+		Dir:         dir,
+		URL:         pm.urlPaths.URL,
+	}
+
+	// A slug set in front matter overrides the file base name.
+	if slug := pm.Slug(); slug != "" {
+		desc.BaseName = slug
+	} else {
+		desc.BaseName = baseName
+	}
+
+	desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir)
+	desc.PrefixLink = s.getLanguagePermalinkLang(alwaysInSubDir)
+
+	// Expand only page.KindPage and page.KindTaxonomy; don't expand other Kinds of Pages
+	// like page.KindSection or page.KindTaxonomyTerm because they are "shallower" and
+	// the permalink configuration values are likely to be redundant, e.g.
+	// naively expanding /category/:slug/ would give /category/categories/ for
+	// the "categories" page.KindTaxonomyTerm.
+	if kind == page.KindPage || kind == page.KindTaxonomy {
+		opath, err := d.ResourceSpec.Permalinks.Expand(p.Section(), p)
+		if err != nil {
+			return desc, err
+		}
+
+		if opath != "" {
+			opath, _ = url.QueryUnescape(opath)
+			desc.ExpandedPermalink = opath
+		}
+	}
+
+	return desc, nil
+}
diff --git a/hugolib/page__per_output.go b/hugolib/page__per_output.go
new file mode 100644
index 00000000000..05b35cc8703
--- /dev/null
+++ b/hugolib/page__per_output.go
@@ -0,0 +1,445 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"bytes"
+	"context"
+	"fmt"
+	"html/template"
+	"strings"
+	"sync"
+	"unicode/utf8"
+
+	"github.com/gohugoio/hugo/lazy"
+
+	bp "github.com/gohugoio/hugo/bufferpool"
+	"github.com/gohugoio/hugo/tpl"
+
+	"github.com/gohugoio/hugo/output"
+
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+// No-op fallbacks for the per-output-format providers; they are
+// composed entirely of page.NopPage and an empty targetPathsHolder.
+var (
+	nopTargetPath    = targetPathsHolder{}
+	nopPagePerOutput = struct {
+		resource.ResourceLinksProvider
+		page.ContentProvider
+		page.PageRenderProvider
+		page.PaginatorProvider
+		page.TableOfContentsProvider
+		page.AlternativeOutputFormatsProvider
+
+		targetPather
+	}{
+		page.NopPage,
+		page.NopPage,
+		page.NopPage,
+		page.NopPage,
+		page.NopPage,
+		page.NopPage,
+		nopTargetPath,
+	}
+)
+
+// newPageContentOutput returns a constructor for the content state of p
+// in a given output format. The heavy lifting (shortcode rendering,
+// markup rendering, summary and ToC extraction) is deferred to lazy
+// inits branched off the page's init, so it only runs when the content
+// is actually accessed.
+func newPageContentOutput(p *pageState) func(f output.Format) (*pageContentOutput, error) {
+
+	parent := p.init
+
+	return func(f output.Format) (*pageContentOutput, error) {
+		cp := &pageContentOutput{
+			p: p,
+			f: f,
+		}
+
+		initContent := func() error {
+			var err error
+			var hasVariants bool
+
+			cp.contentPlaceholders, hasVariants, err = p.shortcodeState.renderShortcodesForPage(p, f)
+			if err != nil {
+				return err
+			}
+
+			if p.render && !hasVariants {
+				// We can reuse this for the other output formats
+				cp.enableReuse()
+			}
+
+			cp.workContent = p.contentToRender(cp.contentPlaceholders)
+
+			isHTML := cp.p.m.markup == "html"
+
+			if p.renderable {
+				if !isHTML {
+					cp.workContent = cp.renderContent(p, cp.workContent)
+					tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent)
+					cp.tableOfContents = helpers.BytesToHTML(tmpTableOfContents)
+					cp.workContent = tmpContent
+				}
+
+				if cp.placeholdersEnabled {
+					// ToC was accessed via .Page.TableOfContents in the shortcode,
+					// at a time when the ToC wasn't ready.
+					cp.contentPlaceholders[tocShortcodePlaceholder] = string(cp.tableOfContents)
+				}
+
+				if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled {
+					// There are one or more replacement tokens to be replaced.
+					cp.workContent, err = replaceShortcodeTokens(cp.workContent, cp.contentPlaceholders)
+					if err != nil {
+						return err
+					}
+				}
+
+				if cp.p.source.hasSummaryDivider {
+					if isHTML {
+						src := p.source.parsed.Input()
+
+						// Use the summary sections as they are provided by the user.
+						if p.source.posSummaryEnd != -1 {
+							cp.summary = helpers.BytesToHTML(src[p.source.posMainContent:p.source.posSummaryEnd])
+						}
+
+						if cp.p.source.posBodyStart != -1 {
+							cp.workContent = src[cp.p.source.posBodyStart:]
+						}
+
+					} else {
+						summary, content, err := splitUserDefinedSummaryAndContent(cp.p.m.markup, cp.workContent)
+						if err != nil {
+							cp.p.s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", cp.p.pathOrTitle(), err)
+						} else {
+							cp.workContent = content
+							cp.summary = helpers.BytesToHTML(summary)
+						}
+					}
+				}
+			}
+
+			cp.content = helpers.BytesToHTML(cp.workContent)
+
+			if !p.renderable {
+				// Non-renderable pages serve their content as a template.
+				err := cp.addSelfTemplate()
+				return err
+			}
+
+			return nil
+
+		}
+
+		// Recursive loops can only happen in content files with template code (shortcodes etc.)
+		// Avoid creating new goroutines if we don't have to.
+		needTimeout := !p.renderable || p.shortcodeState.hasShortcodes()
+
+		if needTimeout {
+			cp.initMain = parent.BranchdWithTimeout(p.s.siteCfg.timeout, func(ctx context.Context) (interface{}, error) {
+				return nil, initContent()
+			})
+		} else {
+			cp.initMain = parent.Branch(func() (interface{}, error) {
+				return nil, initContent()
+			})
+		}
+
+		cp.initPlain = cp.initMain.Branch(func() (interface{}, error) {
+			cp.plain = helpers.StripHTML(string(cp.content))
+			cp.plainWords = strings.Fields(cp.plain)
+			cp.setWordCounts(p.m.isCJKLanguage)
+
+			// Bug fix: the error belongs in the second return position;
+			// the previous "return err, nil" silently dropped it.
+			if err := cp.setAutoSummary(); err != nil {
+				return nil, err
+			}
+
+			return nil, nil
+		})
+
+		return cp, nil
+
+	}
+
+}
+
+// pageContentOutput represents the Page content for a given output format.
+type pageContentOutput struct {
+	f output.Format
+
+	// If we can safely reuse this for other output formats.
+	reuse     bool
+	reuseInit sync.Once
+
+	p *pageState
+
+	// Lazy load dependencies: initMain produces the rendered content,
+	// initPlain derives the plain-text variants from it.
+	initMain  *lazy.Init
+	initPlain *lazy.Init
+
+	placeholdersEnabled     bool
+	placeholdersEnabledInit sync.Once
+
+	// Content state
+
+	workContent []byte
+
+	// Temporary storage of placeholders mapped to their content.
+	// These are shortcodes etc. Some of these will need to be replaced
+	// after any markup is rendered, so they share a common prefix.
+	contentPlaceholders map[string]string
+
+	// Content sections
+	content         template.HTML
+	summary         template.HTML
+	tableOfContents template.HTML
+
+	// Whether the (auto) summary was cut short of the full content.
+	truncated bool
+
+	// Plain-text derived state, set by the initPlain branch.
+	plainWords     []string
+	plain          string
+	fuzzyWordCount int
+	wordCount      int
+	readingTime    int
+}
+
+// Content returns the rendered content, triggering the content init if
+// needed.
+func (p *pageContentOutput) Content() (interface{}, error) {
+	p.p.s.initInit(p.initMain, p.p)
+	return p.content, nil
+}
+
+// FuzzyWordCount returns the word count rounded to a multiple of 100.
+func (p *pageContentOutput) FuzzyWordCount() int {
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.fuzzyWordCount
+}
+
+// Len returns the length of the rendered content, in bytes.
+func (p *pageContentOutput) Len() int {
+	p.p.s.initInit(p.initMain, p.p)
+	return len(p.content)
+}
+
+// Plain returns the rendered content with all HTML stripped.
+func (p *pageContentOutput) Plain() string {
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.plain
+}
+
+// PlainWords returns the plain content split into words.
+func (p *pageContentOutput) PlainWords() []string {
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.plainWords
+}
+
+// ReadingTime returns the estimated reading time.
+func (p *pageContentOutput) ReadingTime() int {
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.readingTime
+}
+
+// Summary returns the page summary. If the source has no explicit
+// summary divider, the auto summary (set by the plain-text init) is
+// returned.
+func (p *pageContentOutput) Summary() template.HTML {
+	p.p.s.initInit(p.initMain, p.p)
+	if !p.p.source.hasSummaryDivider {
+		p.p.s.initInit(p.initPlain, p.p)
+	}
+	return p.summary
+}
+
+// TableOfContents returns the rendered table of contents.
+func (p *pageContentOutput) TableOfContents() template.HTML {
+	p.p.s.initInit(p.initMain, p.p)
+	return p.tableOfContents
+}
+
+// Truncated reports whether the summary is shorter than the content,
+// considering both the page source flag and the auto summary.
+func (p *pageContentOutput) Truncated() bool {
+	if p.p.truncated {
+		return true
+	}
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.truncated
+}
+
+// WordCount returns the number of words in the plain content.
+func (p *pageContentOutput) WordCount() int {
+	p.p.s.initInit(p.initPlain, p.p)
+	return p.wordCount
+}
+
+// setAutoSummary derives a summary from the plain text when the source
+// has no summary divider, and records whether content was truncated in
+// the process.
+func (p *pageContentOutput) setAutoSummary() error {
+	if p.p.source.hasSummaryDivider {
+		// The summary is provided by the user; nothing to do.
+		return nil
+	}
+
+	var (
+		summary   string
+		truncated bool
+	)
+
+	if p.p.m.isCJKLanguage {
+		summary, truncated = p.p.s.ContentSpec.TruncateWordsByRune(p.plainWords)
+	} else {
+		summary, truncated = p.p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain)
+	}
+
+	p.summary = template.HTML(summary)
+	p.truncated = truncated
+
+	return nil
+}
+
+// renderContent runs content through the configured markup renderer for
+// the given page, with ToC extraction enabled.
+func (cp *pageContentOutput) renderContent(p page.Page, content []byte) []byte {
+	return cp.p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
+		Content: content, RenderTOC: true, PageFmt: cp.p.m.markup,
+		Cfg:        p.Language(),
+		DocumentID: p.File().UniqueID(), DocumentName: p.File().Path(),
+		Config: cp.p.getRenderingConfig()})
+}
+
+// setWordCounts computes the word count, fuzzy word count and reading
+// time from the plain text. CJK text is counted per rune and uses a
+// larger reading-time divisor.
+func (p *pageContentOutput) setWordCounts(isCJKLanguage bool) {
+	if !isCJKLanguage {
+		p.wordCount = helpers.TotalWords(p.plain)
+	} else {
+		p.wordCount = 0
+		for _, word := range p.plainWords {
+			runes := utf8.RuneCountInString(word)
+			if runes == len(word) {
+				// Single-byte word: count it once.
+				p.wordCount++
+			} else {
+				// Multi-byte word: count every rune.
+				p.wordCount += runes
+			}
+		}
+	}
+
+	// TODO(bep) is set in a test. Fix that.
+	if p.fuzzyWordCount == 0 {
+		p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
+	}
+
+	// Ceiling division: (n + d - 1) / d.
+	divisor := 213
+	if isCJKLanguage {
+		divisor = 501
+	}
+	p.readingTime = (p.wordCount + divisor - 1) / divisor
+}
+
+// addSelfTemplate registers the page's rendered content as a late
+// template under the page's self layout name for this output format
+// (used for pages that are not renderable as regular content).
+func (p *pageContentOutput) addSelfTemplate() error {
+	self := p.p.selfLayoutForOutput(p.f)
+	// Return the error directly; the previous check-then-return-nil was
+	// redundant.
+	return p.p.s.TemplateHandler().AddLateTemplate(self, string(p.content))
+}
+
+// A callback to signal that we have inserted a placeholder into the rendered
+// content. This avoids doing extra replacement work.
+func (p *pageContentOutput) enablePlaceholders() {
+	p.placeholdersEnabledInit.Do(func() {
+		p.placeholdersEnabled = true
+	})
+}
+
+// enableReuse marks this content output as safe to share with other
+// output formats.
+func (p *pageContentOutput) enableReuse() {
+	p.reuseInit.Do(func() {
+		p.reuse = true
+	})
+}
+
+// pagePerOutputProviders groups the interfaces whose implementations
+// are switched out per output format when rendering.
+type pagePerOutputProviders interface {
+	targetPather
+	page.ContentProvider
+	page.PaginatorProvider
+	page.TableOfContentsProvider
+	resource.ResourceLinksProvider
+}
+
+// targetPather provides the computed target paths for an output format.
+type targetPather interface {
+	targetPaths() page.TargetPaths
+}
+
+// targetPathsHolder pairs computed target paths with the
+// page.OutputFormat they belong to.
+type targetPathsHolder struct {
+	paths page.TargetPaths
+	page.OutputFormat
+}
+
+// targetPaths implements targetPather.
+func (t targetPathsHolder) targetPaths() page.TargetPaths {
+	return t.paths
+}
+
+// executeToString renders templ with the given data and returns the
+// result as a string, using a pooled buffer for the intermediate
+// output.
+func executeToString(templ tpl.Template, data interface{}) (string, error) {
+	buf := bp.GetBuffer()
+	defer bp.PutBuffer(buf)
+
+	if err := templ.Execute(buf, data); err != nil {
+		return "", err
+	}
+
+	return buf.String(), nil
+}
+
+func splitUserDefinedSummaryAndContent(markup string, c []byte) (summary []byte, content []byte, err error) {
+	defer func() {
+		if r := recover(); r != nil {
+			err = fmt.Errorf("summary split failed: %s", r)
+		}
+	}()
+
+	startDivider := bytes.Index(c, internalSummaryDividerBaseBytes)
+
+	if startDivider == -1 {
+		return
+	}
+
+	startTag := "p"
+	switch markup {
+	case "asciidoc":
+		startTag = "div"
+
+	}
+
+	// Walk back and forward to the surrounding tags.
+	start := bytes.LastIndex(c[:startDivider], []byte("<"+startTag))
+	end := bytes.Index(c[startDivider:], []byte("</"+startTag))
+
+	if start == -1 {
+		start = startDivider
+	} else {
+		start = startDivider - (startDivider - start)
+	}
+
+	if end == -1 {
+		end = startDivider + len(internalSummaryDividerBase)
+	} else {
+		end = startDivider + end + len(startTag) + 3
+	}
+
+	var addDiv bool
+
+	switch markup {
+	case "rst":
+		addDiv = true
+	}
+
+	withoutDivider := append(c[:start], bytes.Trim(c[end:], "\n")...)
+
+	if len(withoutDivider) > 0 {
+		summary = bytes.TrimSpace(withoutDivider[:start])
+	}
+
+	if addDiv {
+		// For the rst
+		summary = append(append([]byte(nil), summary...), []byte("</div>")...)
+	}
+
+	if err != nil {
+		return
+	}
+
+	content = bytes.TrimSpace(withoutDivider)
+
+	return
+}
diff --git a/hugolib/page_ref.go b/hugolib/page__ref.go
similarity index 56%
rename from hugolib/page_ref.go
rename to hugolib/page__ref.go
index af1ec3e7067..41bd527db98 100644
--- a/hugolib/page_ref.go
+++ b/hugolib/page__ref.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -22,24 +22,43 @@ import (
 	"github.com/pkg/errors"
 )
 
-type refArgs struct {
-	Path         string
-	Lang         string
-	OutputFormat string
+func newPageRef(p *pageState) pageRef {
+	return pageRef{p: p}
 }
 
-func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error) {
+type pageRef struct {
+	p *pageState
+}
+
+func (p pageRef) Ref(argsm map[string]interface{}) (string, error) {
+	return p.ref(argsm, p.p)
+}
+
+func (p pageRef) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return p.ref(argsm, source)
+}
+
+func (p pageRef) RelRef(argsm map[string]interface{}) (string, error) {
+	return p.relRef(argsm, p.p)
+}
+
+func (p pageRef) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return p.relRef(argsm, source)
+}
+
+func (p pageRef) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error) {
 	var ra refArgs
 	err := mapstructure.WeakDecode(args, &ra)
 	if err != nil {
 		return ra, nil, nil
 	}
-	s := p.s
 
-	if ra.Lang != "" && ra.Lang != p.Lang() {
+	s := p.p.s
+
+	if ra.Lang != "" && ra.Lang != p.p.s.Language().Lang {
 		// Find correct site
 		found := false
-		for _, ss := range p.s.owner.Sites {
+		for _, ss := range p.p.s.h.Sites {
 			if ss.Lang() == ra.Lang {
 				found = true
 				s = ss
@@ -47,7 +66,7 @@ func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error
 		}
 
 		if !found {
-			p.s.siteRefLinker.logNotFound(ra.Path, fmt.Sprintf("no site found with lang %q", ra.Lang), p, text.Position{})
+			p.p.s.siteRefLinker.logNotFound(ra.Path, fmt.Sprintf("no site found with lang %q", ra.Lang), nil, text.Position{})
 			return ra, nil, nil
 		}
 	}
@@ -55,18 +74,14 @@ func (p *Page) decodeRefArgs(args map[string]interface{}) (refArgs, *Site, error
 	return ra, s, nil
 }
 
-func (p *Page) Ref(argsm map[string]interface{}) (string, error) {
-	return p.ref(argsm, p)
-}
-
-func (p *Page) ref(argsm map[string]interface{}, source interface{}) (string, error) {
+func (p pageRef) ref(argsm map[string]interface{}, source interface{}) (string, error) {
 	args, s, err := p.decodeRefArgs(argsm)
 	if err != nil {
 		return "", errors.Wrap(err, "invalid arguments to Ref")
 	}
 
 	if s == nil {
-		return p.s.siteRefLinker.notFoundURL, nil
+		return p.p.s.siteRefLinker.notFoundURL, nil
 	}
 
 	if args.Path == "" {
@@ -77,18 +92,14 @@ func (p *Page) ref(argsm map[string]interface{}, source interface{}) (string, er
 
 }
 
-func (p *Page) RelRef(argsm map[string]interface{}) (string, error) {
-	return p.relRef(argsm, p)
-}
-
-func (p *Page) relRef(argsm map[string]interface{}, source interface{}) (string, error) {
+func (p pageRef) relRef(argsm map[string]interface{}, source interface{}) (string, error) {
 	args, s, err := p.decodeRefArgs(argsm)
 	if err != nil {
 		return "", errors.Wrap(err, "invalid arguments to Ref")
 	}
 
 	if s == nil {
-		return p.s.siteRefLinker.notFoundURL, nil
+		return p.p.s.siteRefLinker.notFoundURL, nil
 	}
 
 	if args.Path == "" {
@@ -98,3 +109,9 @@ func (p *Page) relRef(argsm map[string]interface{}, source interface{}) (string,
 	return s.refLink(args.Path, source, true, args.OutputFormat)
 
 }
+
+type refArgs struct {
+	Path         string
+	Lang         string
+	OutputFormat string
+}
diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go
new file mode 100644
index 00000000000..a99e6f16cf3
--- /dev/null
+++ b/hugolib/page__tree.go
@@ -0,0 +1,113 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"github.com/gohugoio/hugo/common/types"
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+// pageTree implements the section-tree navigation methods for a
+// pageState.
+type pageTree struct {
+	p *pageState
+}
+
+// IsAncestor reports whether this page is an ancestor of the given
+// page, i.e. whether other's section path is nested below this page's.
+func (pt pageTree) IsAncestor(other interface{}) (bool, error) {
+	if pt.p == nil {
+		return false, nil
+	}
+
+	pp, err := unwrapPage(other)
+	if err != nil || pp == nil {
+		return false, err
+	}
+
+	if pt.p.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
+		// A regular page is never its section's ancestor.
+		return false, nil
+	}
+
+	return helpers.HasStringsPrefix(pp.SectionsEntries(), pt.p.SectionsEntries()), nil
+}
+
+// CurrentSection returns the page itself when it is a home or section
+// page; otherwise the section it belongs to (its parent).
+func (pt pageTree) CurrentSection() page.Page {
+	if p := pt.p; p.IsHome() || p.IsSection() {
+		return p
+	}
+	return pt.p.Parent()
+}
+
+// IsDescendant reports whether this page is a descendant of the given
+// page, i.e. whether this page's section path is nested below other's.
+func (pt pageTree) IsDescendant(other interface{}) (bool, error) {
+	if pt.p == nil {
+		return false, nil
+	}
+	pp, err := unwrapPage(other)
+	if err != nil || pp == nil {
+		return false, err
+	}
+
+	if pp.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
+		// A regular page is never its section's descendant.
+		return false, nil
+	}
+	return helpers.HasStringsPrefix(pt.p.SectionsEntries(), pp.SectionsEntries()), nil
+}
+
+// FirstSection climbs the section tree and returns the topmost section
+// below home that contains this page (or the page itself when it has no
+// parent below home).
+func (pt pageTree) FirstSection() page.Page {
+	current := pt.p
+
+	for {
+		parent := current.Parent()
+		if types.IsNil(parent) || parent.IsHome() {
+			return current
+		}
+		current = parent
+	}
+}
+
+// InSection reports whether the given page is in the same section as
+// this one, i.e. whether the two pages share the same CurrentSection.
+func (pt pageTree) InSection(other interface{}) (bool, error) {
+	if pt.p == nil || types.IsNil(other) {
+		return false, nil
+	}
+
+	pp, err := unwrapPage(other)
+	if err != nil || pp == nil {
+		// err is nil when pp is nil, so this also covers the
+		// "no page" case.
+		return false, err
+	}
+
+	return pp.CurrentSection().Eq(pt.p.CurrentSection()), nil
+}
+
+// Parent returns the page's parent in the section tree, if any.
+func (pt pageTree) Parent() page.Page {
+	return pt.p.parent
+}
+
+// Sections returns this page's subsections, if any.
+func (pt pageTree) Sections() page.Pages {
+	return pt.p.subSections
+}
diff --git a/hugolib/page_content.go b/hugolib/page_content.go
index 924400aead2..1b40c2ae709 100644
--- a/hugolib/page_content.go
+++ b/hugolib/page_content.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,18 +14,9 @@
 package hugolib
 
 import (
-	"bytes"
-	"io"
+	"fmt"
 
-	"github.com/gohugoio/hugo/helpers"
-
-	errors "github.com/pkg/errors"
-
-	bp "github.com/gohugoio/hugo/bufferpool"
-
-	"github.com/gohugoio/hugo/common/herrors"
-	"github.com/gohugoio/hugo/common/text"
-	"github.com/gohugoio/hugo/parser/metadecoders"
+	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/parser/pageparser"
 )
 
@@ -38,196 +29,107 @@ var (
 // The content related items on a Page.
 type pageContent struct {
 	renderable bool
+	selfLayout string // base name for a page's self-rendering layout; see selfLayoutForOutput
+
+	truncated bool // whether there is more content after the summary divider
 
-	// workContent is a copy of rawContent that may be mutated during site build.
-	workContent []byte
+	cmap *pageContentMap // ordered map of source items, replacements and shortcodes
 
 	shortcodeState *shortcodeHandler
 
 	source rawPageContent
 }
 
-type rawPageContent struct {
-	hasSummaryDivider bool
-
-	// The AST of the parsed page. Contains information about:
-	// shortcodes, front matter, summary indicators.
-	parsed pageparser.Result
-
-	// Returns the position in bytes after any front matter.
-	posMainContent int
-}
-
-// TODO(bep) lazy consolidate
-func (p *Page) mapContent() error {
-	p.shortcodeState = newShortcodeHandler(p)
-	s := p.shortcodeState
-	p.renderable = true
-	p.source.posMainContent = -1
-
-	result := bp.GetBuffer()
-	defer bp.PutBuffer(result)
-
-	iter := p.source.parsed.Iterator()
-
-	fail := func(err error, i pageparser.Item) error {
-		return p.parseError(err, iter.Input(), i.Pos)
-	}
-
-	// the parser is guaranteed to return items in proper order or fail, so …
-	// … it's safe to keep some "global" state
-	var currShortcode shortcode
-	var ordinal int
-
-Loop:
-	for {
-		it := iter.Next()
-
-		switch {
-		case it.Type == pageparser.TypeIgnore:
-		case it.Type == pageparser.TypeHTMLStart:
-			// This is HTML without front matter. It can still have shortcodes.
-			p.renderable = false
-			result.Write(it.Val)
-		case it.IsFrontMatter():
-			f := metadecoders.FormatFromFrontMatterType(it.Type)
-			m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
-			if err != nil {
-				if fe, ok := err.(herrors.FileError); ok {
-					return herrors.ToFileErrorWithOffset(fe, iter.LineNumber()-1)
-				} else {
-					return err
+// contentToRender returns the content to be processed by Blackfriday or similar.
+func (p pageContent) contentToRender(renderedShortcodes map[string]string) []byte {
+	source := p.source.parsed.Input()
+
+	c := make([]byte, 0, len(source)+(len(source)/10)) // ~10% headroom for replacements
+
+	for _, it := range p.cmap.items {
+		switch v := it.(type) {
+		case pageparser.Item:
+			c = append(c, source[v.Pos:v.Pos+len(v.Val)]...)
+		case pageContentReplacement:
+			c = append(c, v.val...)
+		case *shortcode:
+			if v.doMarkup || !p.renderable {
+				// Insert the rendered shortcode.
+				renderedShortcode, found := renderedShortcodes[v.placeholder]
+				if !found {
+					// This should never happen.
+					panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
 				}
-			}
-			if err := p.updateMetaData(m); err != nil {
-				return err
-			}
-
-			next := iter.Peek()
-			if !next.IsDone() {
-				p.source.posMainContent = next.Pos
-			}
-
-			if !p.shouldBuild() {
-				// Nothing more to do.
-				return nil
-			}
-
-		case it.Type == pageparser.TypeLeadSummaryDivider:
-			result.Write(internalSummaryDividerPre)
-			p.source.hasSummaryDivider = true
-			// Need to determine if the page is truncated.
-			f := func(item pageparser.Item) bool {
-				if item.IsNonWhitespace() {
-					p.truncated = true
-
-					// Done
-					return false
-				}
-				return true
-			}
-			iter.PeekWalk(f)
 
-		// Handle shortcode
-		case it.IsLeftShortcodeDelim():
-			// let extractShortcode handle left delim (will do so recursively)
-			iter.Backup()
+				c = append(c, []byte(renderedShortcode)...)
 
-			currShortcode, err := s.extractShortcode(ordinal, iter, p)
-
-			if currShortcode.name != "" {
-				s.nameSet[currShortcode.name] = true
-			}
-
-			if err != nil {
-				return fail(errors.Wrap(err, "failed to extract shortcode"), it)
-			}
-
-			if currShortcode.params == nil {
-				currShortcode.params = make([]string, 0)
-			}
-
-			placeHolder := s.createShortcodePlaceholder()
-			result.WriteString(placeHolder)
-			ordinal++
-			s.shortcodes.Add(placeHolder, currShortcode)
-		case it.Type == pageparser.TypeEmoji:
-			if emoji := helpers.Emoji(it.ValStr()); emoji != nil {
-				result.Write(emoji)
 			} else {
-				result.Write(it.Val)
-			}
-		case it.IsEOF():
-			break Loop
-		case it.IsError():
-			err := fail(errors.WithStack(errors.New(it.ValStr())), it)
-			currShortcode.err = err
-			return err
+				// Insert the placeholder so we can insert the content after
+				// markdown processing.
+				c = append(c, []byte(v.placeholder)...)
+
+			}
 		default:
-			result.Write(it.Val)
+			panic(fmt.Sprintf("unknown item type %T", it))
 		}
 	}
 
-	resultBytes := make([]byte, result.Len())
-	copy(resultBytes, result.Bytes())
-	p.workContent = resultBytes
+	return c
+}
 
-	return nil
+func (p pageContent) selfLayoutForOutput(f output.Format) string { // selfLayoutForOutput returns the self layout name for the given output format, or "" if none is set.
+	if p.selfLayout == "" {
+		return ""
+	}
+	return p.selfLayout + f.Name // the layout name is suffixed with the output format name
 }
 
-func (p *Page) parse(reader io.Reader) error {
+type rawPageContent struct { // rawPageContent holds the parsed source of a page.
+	hasSummaryDivider bool // whether the source contains a summary divider
 
-	parseResult, err := pageparser.Parse(
-		reader,
-		pageparser.Config{EnableEmoji: p.s.Cfg.GetBool("enableEmoji")},
-	)
-	if err != nil {
-		return err
-	}
+	// The AST of the parsed page. Contains information about:
+	// shortcodes, front matter, summary indicators.
+	parsed pageparser.Result
 
-	p.source = rawPageContent{
-		parsed: parseResult,
-	}
+	// The position in bytes after any front matter.
+	posMainContent int
 
-	p.lang = p.File.Lang()
+	// These are set if we're able to determine this from the source.
+	posSummaryEnd int
+	posBodyStart  int
+}
 
-	if p.s != nil && p.s.owner != nil {
-		gi, enabled := p.s.owner.gitInfo.forPage(p)
-		if gi != nil {
-			p.GitInfo = gi
-		} else if enabled {
-			p.s.Log.INFO.Printf("Failed to find GitInfo for page %q", p.Path())
-		}
-	}
+type pageContentReplacement struct { // a text replacement for one source item
+	val []byte // the replacement bytes
 
-	return nil
+	source pageparser.Item // the source item being replaced
 }
 
-func (p *Page) parseError(err error, input []byte, offset int) error {
-	if herrors.UnwrapFileError(err) != nil {
-		// Use the most specific location.
-		return err
-	}
-	pos := p.posFromInput(input, offset)
-	return herrors.NewFileError("md", -1, pos.LineNumber, pos.ColumnNumber, err)
+type pageContentMap struct { // pageContentMap is the ordered list of a page's content items.
+
+	// Set if the page has at least one shortcode that needs markdown rendering; if not, we can skip any pre-rendering of shortcodes.
+	hasMarkdownShortcode bool
 
+	// Indicates whether we must do placeholder replacements.
+	hasNonMarkdownShortcode bool
+
+	// Each item is a *shortcode, a pageContentReplacement or a pageparser.Item.
+	items []interface{}
+}
 
-func (p *Page) posFromInput(input []byte, offset int) text.Position {
-	lf := []byte("\n")
-	input = input[:offset]
-	lineNumber := bytes.Count(input, lf) + 1
-	endOfLastLine := bytes.LastIndex(input, lf)
-
-	return text.Position{
-		Filename:     p.pathOrTitle(),
-		LineNumber:   lineNumber,
-		ColumnNumber: offset - endOfLastLine,
-		Offset:       offset,
-	}
+func (p *pageContentMap) AddBytes(item pageparser.Item) { // AddBytes appends a raw source item to the content map.
+	p.items = append(p.items, item)
+}
 
-func (p *Page) posFromPage(offset int) text.Position {
-	return p.posFromInput(p.source.parsed.Input(), offset)
+func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) { // AddReplacement appends a replacement for the given source item.
+	p.items = append(p.items, pageContentReplacement{val: val, source: source})
+}
+
+func (p *pageContentMap) AddShortcode(s *shortcode) { // AddShortcode appends a shortcode and records which kind of rendering it needs.
+	p.items = append(p.items, s)
+	if s.doMarkup {
+		p.hasMarkdownShortcode = true
+	} else {
+		p.hasNonMarkdownShortcode = true
+	}
+}
diff --git a/hugolib/page_errors.go b/hugolib/page_errors.go
deleted file mode 100644
index 42e2a8835b3..00000000000
--- a/hugolib/page_errors.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-
-	"github.com/gohugoio/hugo/common/herrors"
-	errors "github.com/pkg/errors"
-)
-
-func (p *Page) errorf(err error, format string, a ...interface{}) error {
-	if herrors.UnwrapErrorWithFileContext(err) != nil {
-		// More isn't always better.
-		return err
-	}
-	args := append([]interface{}{p.Lang(), p.pathOrTitle()}, a...)
-	format = "[%s] page %q: " + format
-	if err == nil {
-		errors.Errorf(format, args...)
-		return fmt.Errorf(format, args...)
-	}
-	return errors.Wrapf(err, format, args...)
-}
-
-func (p *Page) errWithFileContext(err error) error {
-
-	err, _ = herrors.WithFileContextForFile(
-		err,
-		p.Filename(),
-		p.Filename(),
-		p.s.SourceSpec.Fs.Source,
-		herrors.SimpleLineMatcher)
-
-	return err
-}
diff --git a/hugolib/page_kinds.go b/hugolib/page_kinds.go
new file mode 100644
index 00000000000..39de31a169b
--- /dev/null
+++ b/hugolib/page_kinds.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+var (
+
+	// This is all the kinds we can expect to find in .Site.Pages.
+	allKindsInPages = []string{page.KindPage, page.KindHome, page.KindSection, page.KindTaxonomy, page.KindTaxonomyTerm}
+	allKinds        = append(allKindsInPages, []string{kindRSS, kindSitemap, kindRobotsTXT, kind404}...) // plus the render-only kinds below
+)
+
+const (
+
+	// Temporary state.
+	kindUnknown = "unknown"
+
+	// The following are (currently) temporary nodes,
+	// i.e. nodes we create just to render in isolation.
+	kindRSS       = "RSS"
+	kindSitemap   = "sitemap"
+	kindRobotsTXT = "robotsTXT"
+	kind404       = "404"
+
+	pageResourceType = "page" // the Resource type used for pages
+)
diff --git a/hugolib/page_output.go b/hugolib/page_output.go
deleted file mode 100644
index 0a3eef9a6a5..00000000000
--- a/hugolib/page_output.go
+++ /dev/null
@@ -1,320 +0,0 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"html/template"
-	"os"
-	"strings"
-	"sync"
-
-	bp "github.com/gohugoio/hugo/bufferpool"
-
-	"github.com/gohugoio/hugo/tpl"
-
-	"github.com/gohugoio/hugo/resources/resource"
-
-	"github.com/gohugoio/hugo/media"
-
-	"github.com/gohugoio/hugo/output"
-)
-
-// PageOutput represents one of potentially many output formats of a given
-// Page.
-type PageOutput struct {
-	*Page
-
-	// Pagination
-	paginator     *Pager
-	paginatorInit sync.Once
-
-	// Page output specific resources
-	resources     resource.Resources
-	resourcesInit sync.Once
-
-	// Keep this to create URL/path variations, i.e. paginators.
-	targetPathDescriptor targetPathDescriptor
-
-	outputFormat output.Format
-}
-
-func (p *PageOutput) targetPath(addends ...string) (string, error) {
-	tp, err := p.createTargetPath(p.outputFormat, false, addends...)
-	if err != nil {
-		return "", err
-	}
-	return tp, nil
-}
-
-func newPageOutput(p *Page, createCopy, initContent bool, f output.Format) (*PageOutput, error) {
-	// TODO(bep) This is only needed for tests and we should get rid of it.
-	if p.targetPathDescriptorPrototype == nil {
-		if err := p.initPaths(); err != nil {
-			return nil, err
-		}
-	}
-
-	if createCopy {
-		p = p.copy(initContent)
-	}
-
-	td, err := p.createTargetPathDescriptor(f)
-
-	if err != nil {
-		return nil, err
-	}
-
-	return &PageOutput{
-		Page:                 p,
-		outputFormat:         f,
-		targetPathDescriptor: td,
-	}, nil
-}
-
-// copy creates a copy of this PageOutput with the lazy sync.Once vars reset
-// so they will be evaluated again, for word count calculations etc.
-func (p *PageOutput) copyWithFormat(f output.Format, initContent bool) (*PageOutput, error) {
-	c, err := newPageOutput(p.Page, true, initContent, f)
-	if err != nil {
-		return nil, err
-	}
-	c.paginator = p.paginator
-	return c, nil
-}
-
-func (p *PageOutput) copy() (*PageOutput, error) {
-	return p.copyWithFormat(p.outputFormat, false)
-}
-
-func (p *PageOutput) layouts(layouts ...string) ([]string, error) {
-	if len(layouts) == 0 && p.selfLayout != "" {
-		return []string{p.selfLayout}, nil
-	}
-
-	layoutDescriptor := p.layoutDescriptor
-
-	if len(layouts) > 0 {
-		layoutDescriptor.Layout = layouts[0]
-		layoutDescriptor.LayoutOverride = true
-	}
-
-	return p.s.layoutHandler.For(
-		layoutDescriptor,
-		p.outputFormat)
-}
-
-func (p *PageOutput) Render(layout ...string) template.HTML {
-	l, err := p.layouts(layout...)
-	if err != nil {
-		p.s.DistinctErrorLog.Printf("in .Render: Failed to resolve layout %q for page %q", layout, p.pathOrTitle())
-		return ""
-	}
-
-	for _, layout := range l {
-		templ, found := p.s.Tmpl.Lookup(layout)
-		if !found {
-			// This is legacy from when we had only one output format and
-			// HTML templates only. Some have references to layouts without suffix.
-			// We default to good old HTML.
-			templ, found = p.s.Tmpl.Lookup(layout + ".html")
-		}
-		if templ != nil {
-			res, err := executeToString(templ, p)
-			if err != nil {
-				p.s.DistinctErrorLog.Printf("in .Render: Failed to execute template %q: %s", layout, err)
-				return template.HTML("")
-			}
-			return template.HTML(res)
-		}
-	}
-
-	return ""
-
-}
-
-func executeToString(templ tpl.Template, data interface{}) (string, error) {
-	b := bp.GetBuffer()
-	defer bp.PutBuffer(b)
-	if err := templ.Execute(b, data); err != nil {
-		return "", err
-	}
-	return b.String(), nil
-
-}
-
-func (p *Page) Render(layout ...string) template.HTML {
-	if p.mainPageOutput == nil {
-		panic(fmt.Sprintf("programming error: no mainPageOutput for %q", p.Path()))
-	}
-	return p.mainPageOutput.Render(layout...)
-}
-
-// OutputFormats holds a list of the relevant output formats for a given resource.
-type OutputFormats []*OutputFormat
-
-// OutputFormat links to a representation of a resource.
-type OutputFormat struct {
-	// Rel constains a value that can be used to construct a rel link.
-	// This is value is fetched from the output format definition.
-	// Note that for pages with only one output format,
-	// this method will always return "canonical".
-	// As an example, the AMP output format will, by default, return "amphtml".
-	//
-	// See:
-	// https://www.ampproject.org/docs/guides/deploy/discovery
-	//
-	// Most other output formats will have "alternate" as value for this.
-	Rel string
-
-	// It may be tempting to export this, but let us hold on to that horse for a while.
-	f output.Format
-
-	p *Page
-}
-
-// Name returns this OutputFormat's name, i.e. HTML, AMP, JSON etc.
-func (o OutputFormat) Name() string {
-	return o.f.Name
-}
-
-// MediaType returns this OutputFormat's MediaType (MIME type).
-func (o OutputFormat) MediaType() media.Type {
-	return o.f.MediaType
-}
-
-// OutputFormats gives the output formats for this Page.
-func (p *Page) OutputFormats() OutputFormats {
-	var o OutputFormats
-	for _, f := range p.outputFormats {
-		o = append(o, newOutputFormat(p, f))
-	}
-	return o
-}
-
-func newOutputFormat(p *Page, f output.Format) *OutputFormat {
-	rel := f.Rel
-	isCanonical := len(p.outputFormats) == 1
-	if isCanonical {
-		rel = "canonical"
-	}
-	return &OutputFormat{Rel: rel, f: f, p: p}
-}
-
-// AlternativeOutputFormats gives the alternative output formats for this PageOutput.
-// Note that we use the term "alternative" and not "alternate" here, as it
-// does not necessarily replace the other format, it is an alternative representation.
-func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) {
-	var o OutputFormats
-	for _, of := range p.OutputFormats() {
-		if of.f.NotAlternative || of.f.Name == p.outputFormat.Name {
-			continue
-		}
-		o = append(o, of)
-	}
-	return o, nil
-}
-
-// deleteResource removes the resource from this PageOutput and the Page. They will
-// always be of the same length, but may contain different elements.
-func (p *PageOutput) deleteResource(i int) {
-	p.resources = append(p.resources[:i], p.resources[i+1:]...)
-	p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...)
-
-}
-
-func (p *PageOutput) Resources() resource.Resources {
-	p.resourcesInit.Do(func() {
-		// If the current out shares the same path as the main page output, we reuse
-		// the resource set. For the "amp" use case, we need to clone them with new
-		// base folder.
-		ff := p.outputFormats[0]
-		if p.outputFormat.Path == ff.Path {
-			p.resources = p.Page.Resources
-			return
-		}
-
-		// Clone it with new base.
-		resources := make(resource.Resources, len(p.Page.Resources))
-
-		for i, r := range p.Page.Resources {
-			if c, ok := r.(resource.Cloner); ok {
-				// Clone the same resource with a new target.
-				resources[i] = c.WithNewBase(p.outputFormat.Path)
-			} else {
-				resources[i] = r
-			}
-		}
-
-		p.resources = resources
-	})
-
-	return p.resources
-}
-
-func (p *PageOutput) renderResources() error {
-
-	for i, r := range p.Resources() {
-		src, ok := r.(resource.Source)
-		if !ok {
-			// Pages gets rendered with the owning page.
-			continue
-		}
-
-		if err := src.Publish(); err != nil {
-			if os.IsNotExist(err) {
-				// The resource has been deleted from the file system.
-				// This should be extremely rare, but can happen on live reload in server
-				// mode when the same resource is member of different page bundles.
-				p.deleteResource(i)
-			} else {
-				p.s.Log.ERROR.Printf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
-			}
-		} else {
-			p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
-		}
-	}
-	return nil
-}
-
-// AlternativeOutputFormats is only available on the top level rendering
-// entry point, and not inside range loops on the Page collections.
-// This method is just here to inform users of that restriction.
-func (p *Page) AlternativeOutputFormats() (OutputFormats, error) {
-	return nil, fmt.Errorf("AlternativeOutputFormats only available from the top level template context for page %q", p.Path())
-}
-
-// Get gets a OutputFormat given its name, i.e. json, html etc.
-// It returns nil if not found.
-func (o OutputFormats) Get(name string) *OutputFormat {
-	for _, f := range o {
-		if strings.EqualFold(f.f.Name, name) {
-			return f
-		}
-	}
-	return nil
-}
-
-// Permalink returns the absolute permalink to this output format.
-func (o *OutputFormat) Permalink() string {
-	rel := o.p.createRelativePermalinkForOutputFormat(o.f)
-	perm, _ := o.p.s.permalinkForOutputFormat(rel, o.f)
-	return perm
-}
-
-// RelPermalink returns the relative permalink to this output format.
-func (o *OutputFormat) RelPermalink() string {
-	rel := o.p.createRelativePermalinkForOutputFormat(o.f)
-	return o.p.s.PathSpec.PrependBasePath(rel, false)
-}
diff --git a/hugolib/page_pagination.go b/hugolib/page_pagination.go
new file mode 100644
index 00000000000..93701e79921
--- /dev/null
+++ b/hugolib/page_pagination.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"sync"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+type pagePaginator struct { // pagePaginator provides lazily initialized pagination for one page.
+	paginatorInit sync.Once
+	current       *page.Pager // the first pager, set once by Paginate or Paginator
+
+	source *pageState // the page being paginated
+}
+
+func (p *pagePaginator) Paginate(seq interface{}, options ...interface{}) (*page.Pager, error) { // Paginate splits the given sequence into pagers; computed only once per page.
+	var initErr error
+	p.paginatorInit.Do(func() { // NOTE(review): shares paginatorInit with Paginator, so whichever runs first wins -- confirm callers never mix them
+		pagerSize, err := page.ResolvePagerSize(p.source.s.Cfg, options...)
+		if err != nil {
+			initErr = err
+			return
+		}
+
+		pd := p.source.targetPathDescriptor
+		pd.Type = p.source.outputFormat()
+		paginator, err := page.Paginate(pd, seq, pagerSize)
+		if err != nil {
+			initErr = err
+			return
+		}
+
+		p.current = paginator.Pagers()[0] // start at the first page
+
+	})
+
+	if initErr != nil {
+		return nil, initErr
+	}
+
+	return p.current, nil
+}
+
+func (p *pagePaginator) Paginator(options ...interface{}) (*page.Pager, error) { // Paginator paginates the page's own Pages collection; computed only once per page.
+	var initErr error
+	p.paginatorInit.Do(func() { // NOTE(review): shares paginatorInit with Paginate, so whichever runs first wins -- confirm callers never mix them
+		pagerSize, err := page.ResolvePagerSize(p.source.s.Cfg, options...)
+		if err != nil {
+			initErr = err
+			return
+		}
+
+		pd := p.source.targetPathDescriptor
+		pd.Type = p.source.outputFormat()
+		paginator, err := page.Paginate(pd, p.source.Pages(), pagerSize)
+		if err != nil {
+			initErr = err
+			return
+		}
+
+		p.current = paginator.Pagers()[0] // start at the first page
+
+	})
+
+	if initErr != nil {
+		return nil, initErr
+	}
+
+	return p.current, nil
+}
diff --git a/hugolib/page_paths.go b/hugolib/page_paths.go
deleted file mode 100644
index 9de7b0764fc..00000000000
--- a/hugolib/page_paths.go
+++ /dev/null
@@ -1,312 +0,0 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"path/filepath"
-
-	"net/url"
-	"strings"
-
-	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/output"
-)
-
-// targetPathDescriptor describes how a file path for a given resource
-// should look like on the file system. The same descriptor is then later used to
-// create both the permalinks and the relative links, paginator URLs etc.
-//
-// The big motivating behind this is to have only one source of truth for URLs,
-// and by that also get rid of most of the fragile string parsing/encoding etc.
-//
-// Page.createTargetPathDescriptor is the Page adapter.
-//
-type targetPathDescriptor struct {
-	PathSpec *helpers.PathSpec
-
-	Type output.Format
-	Kind string
-
-	Sections []string
-
-	// For regular content pages this is either
-	// 1) the Slug, if set,
-	// 2) the file base name (TranslationBaseName).
-	BaseName string
-
-	// Source directory.
-	Dir string
-
-	// Language prefix, set if multilingual and if page should be placed in its
-	// language subdir.
-	LangPrefix string
-
-	// Whether this is a multihost multilingual setup.
-	IsMultihost bool
-
-	// URL from front matter if set. Will override any Slug etc.
-	URL string
-
-	// Used to create paginator links.
-	Addends string
-
-	// The expanded permalink if defined for the section, ready to use.
-	ExpandedPermalink string
-
-	// Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
-	UglyURLs bool
-}
-
-// createTargetPathDescriptor adapts a Page and the given output.Format into
-// a targetPathDescriptor. This descriptor can then be used to create paths
-// and URLs for this Page.
-func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor, error) {
-	if p.targetPathDescriptorPrototype == nil {
-		panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.title, p.Kind))
-	}
-	d := *p.targetPathDescriptorPrototype
-	d.Type = t
-	return d, nil
-}
-
-func (p *Page) initTargetPathDescriptor() error {
-	d := &targetPathDescriptor{
-		PathSpec:    p.s.PathSpec,
-		Kind:        p.Kind,
-		Sections:    p.sections,
-		UglyURLs:    p.s.Info.uglyURLs(p),
-		Dir:         filepath.ToSlash(p.Dir()),
-		URL:         p.frontMatterURL,
-		IsMultihost: p.s.owner.IsMultihost(),
-	}
-
-	if p.Slug != "" {
-		d.BaseName = p.Slug
-	} else {
-		d.BaseName = p.TranslationBaseName()
-	}
-
-	if p.shouldAddLanguagePrefix() {
-		d.LangPrefix = p.Lang()
-	}
-
-	// Expand only KindPage and KindTaxonomy; don't expand other Kinds of Pages
-	// like KindSection or KindTaxonomyTerm because they are "shallower" and
-	// the permalink configuration values are likely to be redundant, e.g.
-	// naively expanding /category/:slug/ would give /category/categories/ for
-	// the "categories" KindTaxonomyTerm.
-	if p.Kind == KindPage || p.Kind == KindTaxonomy {
-		if override, ok := p.Site.Permalinks[p.Section()]; ok {
-			opath, err := override.Expand(p)
-			if err != nil {
-				return err
-			}
-
-			opath, _ = url.QueryUnescape(opath)
-			opath = filepath.FromSlash(opath)
-			d.ExpandedPermalink = opath
-		}
-	}
-
-	p.targetPathDescriptorPrototype = d
-	return nil
-
-}
-
-func (p *Page) initURLs() error {
-	if len(p.outputFormats) == 0 {
-		p.outputFormats = p.s.outputFormats[p.Kind]
-	}
-	target := filepath.ToSlash(p.createRelativeTargetPath())
-	rel := p.s.PathSpec.URLizeFilename(target)
-
-	var err error
-	f := p.outputFormats[0]
-	p.permalink, err = p.s.permalinkForOutputFormat(rel, f)
-	if err != nil {
-		return err
-	}
-
-	p.relTargetPathBase = strings.TrimPrefix(strings.TrimSuffix(target, f.MediaType.FullSuffix()), "/")
-	if prefix := p.s.GetLanguagePrefix(); prefix != "" {
-		// Any language code in the path will be added later.
-		p.relTargetPathBase = strings.TrimPrefix(p.relTargetPathBase, prefix+"/")
-	}
-	p.relPermalink = p.s.PathSpec.PrependBasePath(rel, false)
-	p.layoutDescriptor = p.createLayoutDescriptor()
-	return nil
-}
-
-func (p *Page) initPaths() error {
-	if err := p.initTargetPathDescriptor(); err != nil {
-		return err
-	}
-	if err := p.initURLs(); err != nil {
-		return err
-	}
-	return nil
-}
-
-// createTargetPath creates the target filename for this Page for the given
-// output.Format. Some additional URL parts can also be provided, the typical
-// use case being pagination.
-func (p *Page) createTargetPath(t output.Format, noLangPrefix bool, addends ...string) (string, error) {
-	d, err := p.createTargetPathDescriptor(t)
-	if err != nil {
-		return "", nil
-	}
-
-	if noLangPrefix {
-		d.LangPrefix = ""
-	}
-
-	if len(addends) > 0 {
-		d.Addends = filepath.Join(addends...)
-	}
-
-	return createTargetPath(d), nil
-}
-
-func createTargetPath(d targetPathDescriptor) string {
-
-	pagePath := helpers.FilePathSeparator
-
-	// The top level index files, i.e. the home page etc., needs
-	// the index base even when uglyURLs is enabled.
-	needsBase := true
-
-	isUgly := d.UglyURLs && !d.Type.NoUgly
-
-	if d.ExpandedPermalink == "" && d.BaseName != "" && d.BaseName == d.Type.BaseName {
-		isUgly = true
-	}
-
-	if d.Kind != KindPage && d.URL == "" && len(d.Sections) > 0 {
-		if d.ExpandedPermalink != "" {
-			pagePath = filepath.Join(pagePath, d.ExpandedPermalink)
-		} else {
-			pagePath = filepath.Join(d.Sections...)
-		}
-		needsBase = false
-	}
-
-	if d.Type.Path != "" {
-		pagePath = filepath.Join(pagePath, d.Type.Path)
-	}
-
-	if d.Kind != KindHome && d.URL != "" {
-		if d.IsMultihost && d.LangPrefix != "" && !strings.HasPrefix(d.URL, "/"+d.LangPrefix) {
-			pagePath = filepath.Join(d.LangPrefix, pagePath, d.URL)
-		} else {
-			pagePath = filepath.Join(pagePath, d.URL)
-		}
-
-		if d.Addends != "" {
-			pagePath = filepath.Join(pagePath, d.Addends)
-		}
-
-		if strings.HasSuffix(d.URL, "/") || !strings.Contains(d.URL, ".") {
-			pagePath = filepath.Join(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
-		}
-
-	} else if d.Kind == KindPage {
-		if d.ExpandedPermalink != "" {
-			pagePath = filepath.Join(pagePath, d.ExpandedPermalink)
-
-		} else {
-			if d.Dir != "" {
-				pagePath = filepath.Join(pagePath, d.Dir)
-			}
-			if d.BaseName != "" {
-				pagePath = filepath.Join(pagePath, d.BaseName)
-			}
-		}
-
-		if d.Addends != "" {
-			pagePath = filepath.Join(pagePath, d.Addends)
-		}
-
-		if isUgly {
-			pagePath += d.Type.MediaType.FullSuffix()
-		} else {
-			pagePath = filepath.Join(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
-		}
-
-		if d.LangPrefix != "" {
-			pagePath = filepath.Join(d.LangPrefix, pagePath)
-		}
-	} else {
-		if d.Addends != "" {
-			pagePath = filepath.Join(pagePath, d.Addends)
-		}
-
-		needsBase = needsBase && d.Addends == ""
-
-		// No permalink expansion etc. for node type pages (for now)
-		base := ""
-
-		if needsBase || !isUgly {
-			base = helpers.FilePathSeparator + d.Type.BaseName
-		}
-
-		pagePath += base + d.Type.MediaType.FullSuffix()
-
-		if d.LangPrefix != "" {
-			pagePath = filepath.Join(d.LangPrefix, pagePath)
-		}
-	}
-
-	pagePath = filepath.Join(helpers.FilePathSeparator, pagePath)
-
-	// Note: MakePathSanitized will lower case the path if
-	// disablePathToLower isn't set.
-	return d.PathSpec.MakePathSanitized(pagePath)
-}
-
-func (p *Page) createRelativeTargetPath() string {
-
-	if len(p.outputFormats) == 0 {
-		if p.Kind == kindUnknown {
-			panic(fmt.Sprintf("Page %q has unknown kind", p.title))
-		}
-		panic(fmt.Sprintf("Page %q missing output format(s)", p.title))
-	}
-
-	// Choose the main output format. In most cases, this will be HTML.
-	f := p.outputFormats[0]
-
-	return p.createRelativeTargetPathForOutputFormat(f)
-
-}
-
-func (p *Page) createRelativePermalinkForOutputFormat(f output.Format) string {
-	return p.s.PathSpec.URLizeFilename(p.createRelativeTargetPathForOutputFormat(f))
-}
-
-func (p *Page) createRelativeTargetPathForOutputFormat(f output.Format) string {
-	tp, err := p.createTargetPath(f, p.s.owner.IsMultihost())
-
-	if err != nil {
-		p.s.Log.ERROR.Printf("Failed to create permalink for page %q: %s", p.FullFilePath(), err)
-		return ""
-	}
-
-	// For /index.json etc. we must  use the full path.
-	if f.MediaType.FullSuffix() == ".html" && filepath.Base(tp) == "index.html" {
-		tp = strings.TrimSuffix(tp, f.BaseFilename())
-	}
-
-	return tp
-}
diff --git a/hugolib/page_paths_test.go b/hugolib/page_paths_test.go
deleted file mode 100644
index 8f8df6ec193..00000000000
--- a/hugolib/page_paths_test.go
+++ /dev/null
@@ -1,194 +0,0 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"path/filepath"
-	"strings"
-	"testing"
-
-	"github.com/gohugoio/hugo/media"
-
-	"fmt"
-
-	"github.com/gohugoio/hugo/output"
-)
-
-func TestPageTargetPath(t *testing.T) {
-
-	pathSpec := newTestDefaultPathSpec(t)
-
-	noExtNoDelimMediaType := media.TextType
-	noExtNoDelimMediaType.Suffixes = []string{}
-	noExtNoDelimMediaType.Delimiter = ""
-
-	// Netlify style _redirects
-	noExtDelimFormat := output.Format{
-		Name:      "NER",
-		MediaType: noExtNoDelimMediaType,
-		BaseName:  "_redirects",
-	}
-
-	for _, multiHost := range []bool{false, true} {
-		for _, langPrefix := range []string{"", "no"} {
-			for _, uglyURLs := range []bool{false, true} {
-				t.Run(fmt.Sprintf("multihost=%t,langPrefix=%q,uglyURLs=%t", multiHost, langPrefix, uglyURLs),
-					func(t *testing.T) {
-
-						tests := []struct {
-							name     string
-							d        targetPathDescriptor
-							expected string
-						}{
-							{"JSON home", targetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "/index.json"},
-							{"AMP home", targetPathDescriptor{Kind: KindHome, Type: output.AMPFormat}, "/amp/index.html"},
-							{"HTML home", targetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: output.HTMLFormat}, "/index.html"},
-							{"Netlify redirects", targetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: noExtDelimFormat}, "/_redirects"},
-							{"HTML section list", targetPathDescriptor{
-								Kind:     KindSection,
-								Sections: []string{"sect1"},
-								BaseName: "_index",
-								Type:     output.HTMLFormat}, "/sect1/index.html"},
-							{"HTML taxonomy list", targetPathDescriptor{
-								Kind:     KindTaxonomy,
-								Sections: []string{"tags", "hugo"},
-								BaseName: "_index",
-								Type:     output.HTMLFormat}, "/tags/hugo/index.html"},
-							{"HTML taxonomy term", targetPathDescriptor{
-								Kind:     KindTaxonomy,
-								Sections: []string{"tags"},
-								BaseName: "_index",
-								Type:     output.HTMLFormat}, "/tags/index.html"},
-							{
-								"HTML page", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/a/b",
-									BaseName: "mypage",
-									Sections: []string{"a"},
-									Type:     output.HTMLFormat}, "/a/b/mypage/index.html"},
-
-							{
-								"HTML page with index as base", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/a/b",
-									BaseName: "index",
-									Sections: []string{"a"},
-									Type:     output.HTMLFormat}, "/a/b/index.html"},
-
-							{
-								"HTML page with special chars", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/a/b",
-									BaseName: "My Page!",
-									Type:     output.HTMLFormat}, "/a/b/My-Page/index.html"},
-							{"RSS home", targetPathDescriptor{Kind: kindRSS, Type: output.RSSFormat}, "/index.xml"},
-							{"RSS section list", targetPathDescriptor{
-								Kind:     kindRSS,
-								Sections: []string{"sect1"},
-								Type:     output.RSSFormat}, "/sect1/index.xml"},
-							{
-								"AMP page", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/a/b/c",
-									BaseName: "myamp",
-									Type:     output.AMPFormat}, "/amp/a/b/c/myamp/index.html"},
-							{
-								"AMP page with URL with suffix", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/sect/",
-									BaseName: "mypage",
-									URL:      "/some/other/url.xhtml",
-									Type:     output.HTMLFormat}, "/some/other/url.xhtml"},
-							{
-								"JSON page with URL without suffix", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/sect/",
-									BaseName: "mypage",
-									URL:      "/some/other/path/",
-									Type:     output.JSONFormat}, "/some/other/path/index.json"},
-							{
-								"JSON page with URL without suffix and no trailing slash", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/sect/",
-									BaseName: "mypage",
-									URL:      "/some/other/path",
-									Type:     output.JSONFormat}, "/some/other/path/index.json"},
-							{
-								"HTML page with expanded permalink", targetPathDescriptor{
-									Kind:              KindPage,
-									Dir:               "/a/b",
-									BaseName:          "mypage",
-									ExpandedPermalink: "/2017/10/my-title",
-									Type:              output.HTMLFormat}, "/2017/10/my-title/index.html"},
-							{
-								"Paginated HTML home", targetPathDescriptor{
-									Kind:     KindHome,
-									BaseName: "_index",
-									Type:     output.HTMLFormat,
-									Addends:  "page/3"}, "/page/3/index.html"},
-							{
-								"Paginated Taxonomy list", targetPathDescriptor{
-									Kind:     KindTaxonomy,
-									BaseName: "_index",
-									Sections: []string{"tags", "hugo"},
-									Type:     output.HTMLFormat,
-									Addends:  "page/3"}, "/tags/hugo/page/3/index.html"},
-							{
-								"Regular page with addend", targetPathDescriptor{
-									Kind:     KindPage,
-									Dir:      "/a/b",
-									BaseName: "mypage",
-									Addends:  "c/d/e",
-									Type:     output.HTMLFormat}, "/a/b/mypage/c/d/e/index.html"},
-						}
-
-						for i, test := range tests {
-							test.d.PathSpec = pathSpec
-							test.d.UglyURLs = uglyURLs
-							test.d.LangPrefix = langPrefix
-							test.d.IsMultihost = multiHost
-							test.d.Dir = filepath.FromSlash(test.d.Dir)
-							isUgly := uglyURLs && !test.d.Type.NoUgly
-
-							expected := test.expected
-
-							// TODO(bep) simplify
-							if test.d.Kind == KindPage && test.d.BaseName == test.d.Type.BaseName {
-
-							} else if test.d.Kind == KindHome && test.d.Type.Path != "" {
-							} else if (!strings.HasPrefix(expected, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
-								expected = strings.Replace(expected,
-									"/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.Suffix(),
-									"."+test.d.Type.MediaType.Suffix(), -1)
-							}
-
-							if test.d.LangPrefix != "" && !(test.d.Kind == KindPage && test.d.URL != "") {
-								expected = "/" + test.d.LangPrefix + expected
-							} else if multiHost && test.d.LangPrefix != "" && test.d.URL != "" {
-								expected = "/" + test.d.LangPrefix + expected
-							}
-
-							expected = filepath.FromSlash(expected)
-
-							pagePath := createTargetPath(test.d)
-
-							if pagePath != expected {
-								t.Fatalf("[%d] [%s] targetPath expected %q, got: %q", i, test.name, expected, pagePath)
-							}
-						}
-					})
-			}
-		}
-	}
-}
diff --git a/hugolib/page_permalink_test.go b/hugolib/page_permalink_test.go
index 76b0b86354d..ed6eb11e31e 100644
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -63,40 +63,44 @@ func TestPermalink(t *testing.T) {
 	}
 
 	for i, test := range tests {
+		t.Run(fmt.Sprintf("%s-%d", test.file, i), func(t *testing.T) {
 
-		cfg, fs := newTestCfg()
+			cfg, fs := newTestCfg()
 
-		cfg.Set("uglyURLs", test.uglyURLs)
-		cfg.Set("canonifyURLs", test.canonifyURLs)
-		cfg.Set("baseURL", test.base)
+			cfg.Set("uglyURLs", test.uglyURLs)
+			cfg.Set("canonifyURLs", test.canonifyURLs)
+			cfg.Set("baseURL", test.base)
 
-		pageContent := fmt.Sprintf(`---
+			pageContent := fmt.Sprintf(`---
 title: Page
 slug: %q
 url: %q
+output: ["HTML"]
 ---
 Content
 `, test.slug, test.url)
 
-		writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent)
+			writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent)
 
-		s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
-		require.Len(t, s.RegularPages, 1)
+			s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+			require.Len(t, s.RegularPages(), 1)
 
-		p := s.RegularPages[0]
+			p := s.RegularPages()[0]
 
-		u := p.Permalink()
+			u := p.Permalink()
 
-		expected := test.expectedAbs
-		if u != expected {
-			t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u)
-		}
+			expected := test.expectedAbs
+			if u != expected {
+				t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u)
+			}
 
-		u = p.RelPermalink()
+			u = p.RelPermalink()
 
-		expected = test.expectedRel
-		if u != expected {
-			t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u)
-		}
+			expected = test.expectedRel
+			if u != expected {
+				t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u)
+			}
+		})
 	}
+
 }
diff --git a/hugolib/page_position.go b/hugolib/page_position.go
new file mode 100644
index 00000000000..458b3e4234f
--- /dev/null
+++ b/hugolib/page_position.go
@@ -0,0 +1,76 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"github.com/gohugoio/hugo/lazy"
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+func newPagePosition(n *nextPrev) pagePosition {
+	return pagePosition{nextPrev: n}
+}
+
+func newPagePositionInSection(n *nextPrev) pagePositionInSection {
+	return pagePositionInSection{nextPrev: n}
+
+}
+
+type nextPrev struct {
+	init     *lazy.Init
+	prevPage page.Page
+	nextPage page.Page
+}
+
+func (n *nextPrev) next() page.Page {
+	n.init.Do()
+	return n.nextPage
+}
+
+func (n *nextPrev) prev() page.Page {
+	n.init.Do()
+	return n.prevPage
+}
+
+type pagePosition struct {
+	*nextPrev
+}
+
+func (p pagePosition) Next() page.Page {
+	return p.next()
+}
+
+func (p pagePosition) NextPage() page.Page {
+	return p.Next()
+}
+
+func (p pagePosition) Prev() page.Page {
+	return p.prev()
+}
+
+func (p pagePosition) PrevPage() page.Page {
+	return p.Prev()
+}
+
+type pagePositionInSection struct {
+	*nextPrev
+}
+
+func (p pagePositionInSection) NextInSection() page.Page {
+	return p.next()
+}
+
+func (p pagePositionInSection) PrevInSection() page.Page {
+	return p.prev()
+}
diff --git a/hugolib/page_taxonomy_test.go b/hugolib/page_taxonomy_test.go
deleted file mode 100644
index ed1d2565d69..00000000000
--- a/hugolib/page_taxonomy_test.go
+++ /dev/null
@@ -1,96 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"reflect"
-	"strings"
-	"testing"
-)
-
-var pageYamlWithTaxonomiesA = `---
-tags: ['a', 'B', 'c']
-categories: 'd'
----
-YAML frontmatter with tags and categories taxonomy.`
-
-var pageYamlWithTaxonomiesB = `---
-tags:
- - "a"
- - "B"
- - "c"
-categories: 'd'
----
-YAML frontmatter with tags and categories taxonomy.`
-
-var pageYamlWithTaxonomiesC = `---
-tags: 'E'
-categories: 'd'
----
-YAML frontmatter with tags and categories taxonomy.`
-
-var pageJSONWithTaxonomies = `{
-  "categories": "D",
-  "tags": [
-    "a",
-    "b",
-    "c"
-  ]
-}
-JSON Front Matter with tags and categories`
-
-var pageTomlWithTaxonomies = `+++
-tags = [ "a", "B", "c" ]
-categories = "d"
-+++
-TOML Front Matter with tags and categories`
-
-func TestParseTaxonomies(t *testing.T) {
-	t.Parallel()
-	for _, test := range []string{pageTomlWithTaxonomies,
-		pageJSONWithTaxonomies,
-		pageYamlWithTaxonomiesA,
-		pageYamlWithTaxonomiesB,
-		pageYamlWithTaxonomiesC,
-	} {
-
-		s := newTestSite(t)
-		p, _ := s.NewPage("page/with/taxonomy")
-		_, err := p.ReadFrom(strings.NewReader(test))
-		if err != nil {
-			t.Fatalf("Failed parsing %q: %s", test, err)
-		}
-
-		param := p.getParamToLower("tags")
-
-		if params, ok := param.([]string); ok {
-			expected := []string{"a", "b", "c"}
-			if !reflect.DeepEqual(params, expected) {
-				t.Errorf("Expected %s: got: %s", expected, params)
-			}
-		} else if params, ok := param.(string); ok {
-			expected := "e"
-			if params != expected {
-				t.Errorf("Expected %s: got: %s", expected, params)
-			}
-		}
-
-		param = p.getParamToLower("categories")
-		singleparam := param.(string)
-
-		if singleparam != "d" {
-			t.Fatalf("Expected: d, got: %s", singleparam)
-		}
-	}
-}
diff --git a/hugolib/page_test.go b/hugolib/page_test.go
index 1db1d3522f2..5e9ac696c92 100644
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,88 +14,34 @@
 package hugolib
 
 import (
-	"bytes"
 	"fmt"
 	"html/template"
 	"os"
 
 	"path/filepath"
-	"reflect"
-	"sort"
 	"strings"
 	"testing"
 	"time"
 
 	"github.com/gohugoio/hugo/hugofs"
-	"github.com/spf13/afero"
 
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
+
+	"github.com/spf13/afero"
 	"github.com/spf13/viper"
 
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/helpers"
-	"github.com/spf13/cast"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
 
-var emptyPage = ""
-
 const (
-	homePage                             = "---\ntitle: Home\n---\nHome Page Content\n"
-	simplePage                           = "---\ntitle: Simple\n---\nSimple Page\n"
-	renderNoFrontmatter                  = "<!doctype><html><head></head><body>This is a test</body></html>"
-	contentNoFrontmatter                 = "Page without front matter.\n"
-	contentWithCommentedFrontmatter      = "<!--\n+++\ntitle = \"Network configuration\"\ndescription = \"Docker networking\"\nkeywords = [\"network\"]\n[menu.main]\nparent= \"smn_administrate\"\n+++\n-->\n\n# Network configuration\n\n##\nSummary"
-	contentWithCommentedTextFrontmatter  = "<!--[metaData]>\n+++\ntitle = \"Network configuration\"\ndescription = \"Docker networking\"\nkeywords = [\"network\"]\n[menu.main]\nparent= \"smn_administrate\"\n+++\n<![end-metadata]-->\n\n# Network configuration\n\n##\nSummary"
-	contentWithCommentedLongFrontmatter  = "<!--[metaData123456789012345678901234567890]>\n+++\ntitle = \"Network configuration\"\ndescription = \"Docker networking\"\nkeywords = [\"network\"]\n[menu.main]\nparent= \"smn_administrate\"\n+++\n<![end-metadata]-->\n\n# Network configuration\n\n##\nSummary"
-	contentWithCommentedLong2Frontmatter = "<!--[metaData]>\n+++\ntitle = \"Network configuration\"\ndescription = \"Docker networking\"\nkeywords = [\"network\"]\n[menu.main]\nparent= \"smn_administrate\"\n+++\n<![end-metadata123456789012345678901234567890]-->\n\n# Network configuration\n\n##\nSummary"
-	invalidFrontmatterShortDelim         = `
---
-title: Short delim start
----
-Short Delim
-`
-
-	invalidFrontmatterShortDelimEnding = `
----
-title: Short delim ending
---
-Short Delim
-`
+	homePage   = "---\ntitle: Home\n---\nHome Page Content\n"
+	simplePage = "---\ntitle: Simple\n---\nSimple Page\n"
 
-	invalidFrontmatterLadingWs = `
-
- ---
-title: Leading WS
----
-Leading
-`
-
-	simplePageJSON = `
-{
-"title": "spf13-vim 3.0 release and new website",
-"description": "spf13-vim is a cross platform distribution of vim plugins and resources for Vim.",
-"tags": [ ".vimrc", "plugins", "spf13-vim", "VIm" ],
-"date": "2012-04-06",
-"categories": [
-    "Development",
-    "VIM"
-],
-"slug": "-spf13-vim-3-0-release-and-new-website-"
-}
-
-Content of the file goes Here
-`
-
-	simplePageRFC3339Date  = "---\ntitle: RFC3339 Date\ndate: \"2013-05-17T16:59:30Z\"\n---\nrfc3339 content"
-	simplePageJSONMultiple = `
-{
-	"title": "foobar",
-	"customData": { "foo": "bar" },
-	"date": "2012-08-06"
-}
-Some text
-`
+	simplePageRFC3339Date = "---\ntitle: RFC3339 Date\ndate: \"2013-05-17T16:59:30Z\"\n---\nrfc3339 content"
 
 	simplePageWithSummaryDelimiter = `---
 title: Simple
@@ -135,14 +81,6 @@ title: Simple
 Summary Same Line<!--more-->
 
 Some more text
-`
-
-	simplePageWithSummaryDelimiterOnlySummary = `---
-title: Simple
----
-Summary text
-
-<!--more-->
 `
 
 	simplePageWithAllCJKRunes = `---
@@ -334,156 +272,17 @@ date: '2013-10-15T06:16:13'
 UTF8 Page With Date`
 )
 
-var pageWithVariousFrontmatterTypes = `+++
-a_string = "bar"
-an_integer = 1
-a_float = 1.3
-a_bool = false
-a_date = 1979-05-27T07:32:00Z
-
-[a_table]
-a_key = "a_value"
-+++
-Front Matter with various frontmatter types`
-
-var pageWithCalendarYAMLFrontmatter = `---
-type: calendar
-weeks:
-  -
-    start: "Jan 5"
-    days:
-      - activity: class
-        room: EN1000
-      - activity: lab
-      - activity: class
-      - activity: lab
-      - activity: class
-  -
-    start: "Jan 12"
-    days:
-      - activity: class
-      - activity: lab
-      - activity: class
-      - activity: lab
-      - activity: exam
----
-
-Hi.
-`
-
-var pageWithCalendarJSONFrontmatter = `{
-  "type": "calendar",
-  "weeks": [
-    {
-      "start": "Jan 5",
-      "days": [
-        { "activity": "class", "room": "EN1000" },
-        { "activity": "lab" },
-        { "activity": "class" },
-        { "activity": "lab" },
-        { "activity": "class" }
-      ]
-    },
-    {
-      "start": "Jan 12",
-      "days": [
-        { "activity": "class" },
-        { "activity": "lab" },
-        { "activity": "class" },
-        { "activity": "lab" },
-        { "activity": "exam" }
-      ]
-    }
-  ]
-}
-
-Hi.
-`
-
-var pageWithCalendarTOMLFrontmatter = `+++
-type = "calendar"
-
-[[weeks]]
-start = "Jan 5"
-
-[[weeks.days]]
-activity = "class"
-room = "EN1000"
-
-[[weeks.days]]
-activity = "lab"
-
-[[weeks.days]]
-activity = "class"
-
-[[weeks.days]]
-activity = "lab"
-
-[[weeks.days]]
-activity = "class"
-
-[[weeks]]
-start = "Jan 12"
-
-[[weeks.days]]
-activity = "class"
-
-[[weeks.days]]
-activity = "lab"
-
-[[weeks.days]]
-activity = "class"
-
-[[weeks.days]]
-activity = "lab"
-
-[[weeks.days]]
-activity = "exam"
-+++
-
-Hi.
-`
-
-func checkError(t *testing.T, err error, expected string) {
-	if err == nil {
-		t.Fatalf("err is nil.  Expected: %s", expected)
-	}
-	if !strings.Contains(err.Error(), expected) {
-		t.Errorf("err.Error() returned: '%s'.  Expected: '%s'", err.Error(), expected)
-	}
-}
-
-func TestDegenerateEmptyPageZeroLengthName(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	_, err := s.NewPage("")
-	if err == nil {
-		t.Fatalf("A zero length page name must return an error")
-	}
-
-	checkError(t, err, "Zero length page name")
-}
-
-func TestDegenerateEmptyPage(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	_, err := s.newPageFrom(strings.NewReader(emptyPage), "test")
-	if err != nil {
-		t.Fatalf("Empty files should not trigger an error. Should be able to touch a file while watching without erroring out.")
-	}
-}
-
-func checkPageTitle(t *testing.T, page *Page, title string) {
-	if page.title != title {
-		t.Fatalf("Page title is: %s.  Expected %s", page.title, title)
+func checkPageTitle(t *testing.T, page page.Page, title string) {
+	if page.Title() != title {
+		t.Fatalf("Page title is: %s.  Expected %s", page.Title(), title)
 	}
 }
 
-func checkPageContent(t *testing.T, page *Page, content string, msg ...interface{}) {
-	a := normalizeContent(content)
-	b := normalizeContent(string(page.content()))
+func checkPageContent(t *testing.T, page page.Page, expected string, msg ...interface{}) {
+	a := normalizeContent(expected)
+	b := normalizeContent(content(page))
 	if a != b {
-		t.Log(trace())
+		t.Log(stackTrace())
 		t.Fatalf("Page content is:\n%q\nExpected:\n%q (%q)", b, a, msg)
 	}
 }
@@ -499,42 +298,29 @@ func normalizeContent(c string) string {
 	return strings.TrimSpace(norm)
 }
 
-func checkPageTOC(t *testing.T, page *Page, toc string) {
-	if page.TableOfContents != template.HTML(toc) {
-		t.Fatalf("Page TableOfContents is: %q.\nExpected %q", page.TableOfContents, toc)
+func checkPageTOC(t *testing.T, page page.Page, toc string) {
+	if page.TableOfContents() != template.HTML(toc) {
+		t.Fatalf("Page TableOfContents is: %q.\nExpected %q", page.TableOfContents(), toc)
 	}
 }
 
-func checkPageSummary(t *testing.T, page *Page, summary string, msg ...interface{}) {
-	a := normalizeContent(string(page.summary))
+func checkPageSummary(t *testing.T, page page.Page, summary string, msg ...interface{}) {
+	a := normalizeContent(string(page.Summary()))
 	b := normalizeContent(summary)
 	if a != b {
 		t.Fatalf("Page summary is:\n%q.\nExpected\n%q (%q)", a, b, msg)
 	}
 }
 
-func checkPageType(t *testing.T, page *Page, pageType string) {
+func checkPageType(t *testing.T, page page.Page, pageType string) {
 	if page.Type() != pageType {
 		t.Fatalf("Page type is: %s.  Expected: %s", page.Type(), pageType)
 	}
 }
 
-func checkPageDate(t *testing.T, page *Page, time time.Time) {
-	if page.Date != time {
-		t.Fatalf("Page date is: %s.  Expected: %s", page.Date, time)
-	}
-}
-
-func checkTruncation(t *testing.T, page *Page, shouldBe bool, msg string) {
-	if page.Summary() == "" {
-		t.Fatal("page has no summary, can not check truncation")
-	}
-	if page.truncated != shouldBe {
-		if shouldBe {
-			t.Fatalf("page wasn't truncated: %s", msg)
-		} else {
-			t.Fatalf("page was truncated: %s", msg)
-		}
+func checkPageDate(t *testing.T, page page.Page, time time.Time) {
+	if page.Date() != time {
+		t.Fatalf("Page date is: %s.  Expected: %s", page.Date(), time)
 	}
 }
 
@@ -562,7 +348,7 @@ func normalizeExpected(ext, str string) string {
 }
 
 func testAllMarkdownEnginesForPages(t *testing.T,
-	assertFunc func(t *testing.T, ext string, pages Pages), settings map[string]interface{}, pageSources ...string) {
+	assertFunc func(t *testing.T, ext string, pages page.Pages), settings map[string]interface{}, pageSources ...string) {
 
 	engines := []struct {
 		ext           string
@@ -607,33 +393,93 @@ func testAllMarkdownEnginesForPages(t *testing.T,
 
 		s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-		require.Len(t, s.RegularPages, len(pageSources))
+		require.Len(t, s.RegularPages(), len(pageSources))
 
-		assertFunc(t, e.ext, s.RegularPages)
+		assertFunc(t, e.ext, s.RegularPages())
 
 		home, err := s.Info.Home()
 		require.NoError(t, err)
 		require.NotNil(t, home)
-		require.Equal(t, homePath, home.Path())
-		require.Contains(t, home.content(), "Home Page Content")
+		require.Equal(t, homePath, home.File().Path())
+		require.Contains(t, content(home), "Home Page Content")
+
+	}
 
+}
+
+// Issue #1076
+func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
+	t.Parallel()
+	cfg, fs := newTestCfg()
+
+	writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder)
+
+	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+	require.Len(t, s.RegularPages(), 1)
+
+	p := s.RegularPages()[0]
+
+	if p.Summary() != template.HTML(
+		"<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>") {
+		t.Fatalf("Got summary:\n%q", p.Summary())
+	}
+
+	c := content(p)
+	if c != "<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>\n\n<div class=\"footnotes\">\n\n<hr />\n\n<ol>\n<li id=\"fn:1\">Many people say so.\n <a class=\"footnote-return\" href=\"#fnref:1\"><sup>[return]</sup></a></li>\n</ol>\n</div>" {
+		t.Fatalf("Got content:\n%q", c)
+	}
+}
+
+func TestPageDatesAllKinds(t *testing.T) {
+	t.Parallel()
+	assert := assert.New(t)
+
+	pageContent := `
+---
+title: Page
+date: 2017-01-15
+tags: ["hugo"]
+categories: ["cool stuff"]
+---
+`
+
+	b := newTestSitesBuilder(t)
+	b.WithSimpleConfigFile().WithContent("page.md", pageContent)
+	b.WithSimpleConfigFile().WithContent("blog/page.md", pageContent)
+
+	b.CreateSites().Build(BuildCfg{})
+
+	assert.Equal(1, len(b.H.Sites))
+	s := b.H.Sites[0]
+
+	checkDate := func(t time.Time, msg string) {
+		assert.Equal(2017, t.Year(), msg)
+	}
+
+	checkDated := func(d resource.Dated, msg string) {
+		checkDate(d.Date(), "date: "+msg)
+		checkDate(d.Lastmod(), "lastmod: "+msg)
 	}
+	for _, p := range s.Pages() {
+		checkDated(p, p.Kind())
+	}
+	checkDate(s.Info.LastChange(), "site")
 
 }
 
 func TestCreateNewPage(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 
 		// issue #2290: Path is relative to the content dir and will continue to be so.
-		require.Equal(t, filepath.FromSlash(fmt.Sprintf("p0.%s", ext)), p.Path())
+		require.Equal(t, filepath.FromSlash(fmt.Sprintf("p0.%s", ext)), p.File().Path())
 		assert.False(t, p.IsHome())
 		checkPageTitle(t, p, "Simple")
 		checkPageContent(t, p, normalizeExpected(ext, "<p>Simple Page</p>\n"))
 		checkPageSummary(t, p, "Simple Page")
 		checkPageType(t, p, "page")
-		checkTruncation(t, p, false, "simple short page")
 	}
 
 	settings := map[string]interface{}{
@@ -645,43 +491,17 @@ func TestCreateNewPage(t *testing.T) {
 
 func TestPageWithDelimiter(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		checkPageTitle(t, p, "Simple")
 		checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Next Line</p>\n\n<p>Some more text</p>\n"), ext)
 		checkPageSummary(t, p, normalizeExpected(ext, "<p>Summary Next Line</p>"), ext)
 		checkPageType(t, p, "page")
-		checkTruncation(t, p, true, "page with summary delimiter")
 	}
 
 	testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiter)
 }
 
-// Issue #1076
-func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
-	t.Parallel()
-	cfg, fs := newTestCfg()
-
-	writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder)
-
-	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
-
-	require.Len(t, s.RegularPages, 1)
-
-	p := s.RegularPages[0]
-
-	if p.Summary() != template.HTML(
-		"<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>") {
-		t.Fatalf("Got summary:\n%q", p.Summary())
-	}
-
-	if p.content() != template.HTML(
-		"<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>\n\n<div class=\"footnotes\">\n\n<hr />\n\n<ol>\n<li id=\"fn:1\">Many people say so.\n <a class=\"footnote-return\" href=\"#fnref:1\"><sup>[return]</sup></a></li>\n</ol>\n</div>") {
-
-		t.Fatalf("Got content:\n%q", p.content())
-	}
-}
-
 // Issue #3854
 // Also see https://github.com/gohugoio/hugo/issues/3977
 func TestPageWithDateFields(t *testing.T) {
@@ -693,8 +513,8 @@ weight: %d
 ---
 Simple Page With Some Date`
 
-	hasDate := func(p *Page) bool {
-		return p.Date.Year() == 2017
+	hasDate := func(p page.Page) bool {
+		return p.Date().Year() == 2017
 	}
 
 	datePage := func(field string, weight int) string {
@@ -702,7 +522,7 @@ Simple Page With Some Date`
 	}
 
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		assert.True(len(pages) > 0)
 		for _, p := range pages {
 			assert.True(hasDate(p))
@@ -733,8 +553,8 @@ title: Raw
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
-	p := s.RegularPages[0]
+	require.Len(t, s.RegularPages(), 1)
+	p := s.RegularPages()[0]
 
 	require.Equal(t, p.RawContent(), "**Raw**")
 
@@ -742,7 +562,7 @@ title: Raw
 
 func TestPageWithShortCodeInSummary(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		checkPageTitle(t, p, "Simple")
 		checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Next Line. <figure> <img src=\"/not/real\"/> </figure> . More text here.</p><p>Some more text</p>"))
@@ -755,7 +575,7 @@ func TestPageWithShortCodeInSummary(t *testing.T) {
 
 func TestPageWithEmbeddedScriptTag(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if ext == "ad" || ext == "rst" {
 			// TOD(bep)
@@ -775,9 +595,9 @@ func TestPageWithAdditionalExtension(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
-	p := s.RegularPages[0]
+	p := s.RegularPages()[0]
 
 	checkPageContent(t, p, "<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>\n")
 }
@@ -790,9 +610,9 @@ func TestTableOfContents(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
-	p := s.RegularPages[0]
+	p := s.RegularPages()[0]
 
 	checkPageContent(t, p, "\n\n<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath&ndash;as swift as the passage of light&ndash;would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>&ldquo;You&rsquo;re a great Granser,&rdquo; he cried delightedly, &ldquo;always making believe them little marks mean something.&rdquo;</p>\n")
 	checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n<ul>\n<li>\n<ul>\n<li><a href=\"#aa\">AA</a>\n<ul>\n<li><a href=\"#aaa\">AAA</a></li>\n<li><a href=\"#bbb\">BBB</a></li>\n</ul></li>\n</ul></li>\n</ul>\n</nav>")
@@ -800,7 +620,7 @@ func TestTableOfContents(t *testing.T) {
 
 func TestPageWithMoreTag(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		checkPageTitle(t, p, "Simple")
 		checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Same Line</p>\n\n<p>Some more text</p>\n"))
@@ -812,20 +632,10 @@ func TestPageWithMoreTag(t *testing.T) {
 	testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiterSameLine)
 }
 
-func TestPageWithMoreTagOnlySummary(t *testing.T) {
-
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
-		p := pages[0]
-		checkTruncation(t, p, false, "page with summary delimiter at end")
-	}
-
-	testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiterOnlySummary)
-}
-
 // #2973
 func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) {
 
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		require.Contains(t, p.Summary(), "Happy new year everyone!")
 		require.NotContains(t, p.Summary(), "User interface")
@@ -853,9 +663,9 @@ func TestPageWithDate(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
-	p := s.RegularPages[0]
+	p := s.RegularPages()[0]
 	d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z")
 
 	checkPageDate(t, p, d)
@@ -905,16 +715,16 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) {
 	require.NoError(t, h.Build(BuildCfg{SkipRender: true}))
 
 	enSite := h.Sites[0]
-	assrt.Len(enSite.RegularPages, 1)
+	assrt.Len(enSite.RegularPages(), 1)
 
 	// 2018-03-11 is the Git author date for testsite/content/first-post.md
-	assrt.Equal("2018-03-11", enSite.RegularPages[0].Lastmod.Format("2006-01-02"))
+	assrt.Equal("2018-03-11", enSite.RegularPages()[0].Lastmod().Format("2006-01-02"))
 
 	nnSite := h.Sites[1]
-	assrt.Len(nnSite.RegularPages, 1)
+	assrt.Len(nnSite.RegularPages(), 1)
 
 	// 2018-08-11 is the Git author date for testsite/content_nn/first-post.md
-	assrt.Equal("2018-08-11", nnSite.RegularPages[0].Lastmod.Format("2006-01-02"))
+	assrt.Equal("2018-08-11", nnSite.RegularPages()[0].Lastmod().Format("2006-01-02"))
 
 }
 
@@ -953,28 +763,28 @@ Content
 
 			s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-			assrt.Len(s.RegularPages, 2)
+			assrt.Len(s.RegularPages(), 2)
 
-			noSlug := s.RegularPages[0]
-			slug := s.RegularPages[1]
+			noSlug := s.RegularPages()[0]
+			slug := s.RegularPages()[1]
 
-			assrt.Equal(28, noSlug.Lastmod.Day())
+			assrt.Equal(28, noSlug.Lastmod().Day())
 
 			switch strings.ToLower(dateHandler) {
 			case ":filename":
-				assrt.False(noSlug.Date.IsZero())
-				assrt.False(slug.Date.IsZero())
-				assrt.Equal(2012, noSlug.Date.Year())
-				assrt.Equal(2012, slug.Date.Year())
-				assrt.Equal("noslug", noSlug.Slug)
-				assrt.Equal("aslug", slug.Slug)
+				assrt.False(noSlug.Date().IsZero())
+				assrt.False(slug.Date().IsZero())
+				assrt.Equal(2012, noSlug.Date().Year())
+				assrt.Equal(2012, slug.Date().Year())
+				assrt.Equal("noslug", noSlug.Slug())
+				assrt.Equal("aslug", slug.Slug())
 			case ":filemodtime":
-				assrt.Equal(c1fi.ModTime().Year(), noSlug.Date.Year())
-				assrt.Equal(c2fi.ModTime().Year(), slug.Date.Year())
+				assrt.Equal(c1fi.ModTime().Year(), noSlug.Date().Year())
+				assrt.Equal(c2fi.ModTime().Year(), slug.Date().Year())
 				fallthrough
 			default:
-				assrt.Equal("", noSlug.Slug)
-				assrt.Equal("aslug", slug.Slug)
+				assrt.Equal("", noSlug.Slug())
+				assrt.Equal("aslug", slug.Slug())
 
 			}
 		})
@@ -984,10 +794,10 @@ Content
 
 func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if p.WordCount() != 8 {
-			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 8, p.WordCount())
+			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 8, p.WordCount())
 		}
 	}
 
@@ -998,10 +808,10 @@ func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
 	t.Parallel()
 	settings := map[string]interface{}{"hasCJKLanguage": true}
 
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if p.WordCount() != 15 {
-			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 15, p.WordCount())
+			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 15, p.WordCount())
 		}
 	}
 	testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithAllCJKRunes)
@@ -1011,15 +821,15 @@ func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
 	t.Parallel()
 	settings := map[string]interface{}{"hasCJKLanguage": true}
 
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if p.WordCount() != 74 {
-			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 74, p.WordCount())
+			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 74, p.WordCount())
 		}
 
-		if p.summary != simplePageWithMainEnglishWithCJKRunesSummary {
-			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.plain,
-				simplePageWithMainEnglishWithCJKRunesSummary, p.summary)
+		if p.Summary() != simplePageWithMainEnglishWithCJKRunesSummary {
+			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
+				simplePageWithMainEnglishWithCJKRunesSummary, p.Summary())
 		}
 	}
 
@@ -1032,15 +842,15 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
 		"hasCJKLanguage": true,
 	}
 
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if p.WordCount() != 75 {
-			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 74, p.WordCount())
+			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.Plain(), 74, p.WordCount())
 		}
 
-		if p.summary != simplePageWithIsCJKLanguageFalseSummary {
-			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.plain,
-				simplePageWithIsCJKLanguageFalseSummary, p.summary)
+		if p.Summary() != simplePageWithIsCJKLanguageFalseSummary {
+			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
+				simplePageWithIsCJKLanguageFalseSummary, p.Summary())
 		}
 	}
 
@@ -1050,7 +860,7 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
 
 func TestWordCount(t *testing.T) {
 	t.Parallel()
-	assertFunc := func(t *testing.T, ext string, pages Pages) {
+	assertFunc := func(t *testing.T, ext string, pages page.Pages) {
 		p := pages[0]
 		if p.WordCount() != 483 {
 			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount())
@@ -1064,163 +874,11 @@ func TestWordCount(t *testing.T) {
 			t.Fatalf("[%s] incorrect min read. expected %v, got %v", ext, 3, p.ReadingTime())
 		}
 
-		checkTruncation(t, p, true, "long page")
 	}
 
 	testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithLongContent)
 }
 
-func TestCreatePage(t *testing.T) {
-	t.Parallel()
-	var tests = []struct {
-		r string
-	}{
-		{simplePageJSON},
-		{simplePageJSONMultiple},
-		//{strings.NewReader(SIMPLE_PAGE_JSON_COMPACT)},
-	}
-
-	for i, test := range tests {
-		s := newTestSite(t)
-		p, _ := s.NewPage("page")
-		if _, err := p.ReadFrom(strings.NewReader(test.r)); err != nil {
-			t.Fatalf("[%d] Unable to parse page: %s", i, err)
-		}
-	}
-}
-
-func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
-	t.Parallel()
-	var tests = []struct {
-		r   string
-		err string
-	}{
-		{invalidFrontmatterShortDelimEnding, "EOF looking for end YAML front matter delimiter"},
-	}
-	for _, test := range tests {
-		s := newTestSite(t)
-		p, _ := s.NewPage("invalid/front/matter/short/delim")
-		_, err := p.ReadFrom(strings.NewReader(test.r))
-		checkError(t, err, test.err)
-	}
-}
-
-func TestShouldRenderContent(t *testing.T) {
-	t.Parallel()
-	assert := require.New(t)
-
-	var tests = []struct {
-		text   string
-		render bool
-	}{
-		{contentNoFrontmatter, true},
-		{renderNoFrontmatter, false},
-		{contentWithCommentedFrontmatter, true},
-		{contentWithCommentedTextFrontmatter, true},
-		{contentWithCommentedLongFrontmatter, true},
-		{contentWithCommentedLong2Frontmatter, true},
-	}
-
-	for i, test := range tests {
-		s := newTestSite(t)
-		p, _ := s.NewPage("render/front/matter")
-		_, err := p.ReadFrom(strings.NewReader(test.text))
-		msg := fmt.Sprintf("test %d", i)
-		assert.NoError(err, msg)
-		assert.Equal(test.render, p.IsRenderable(), msg)
-	}
-}
-
-// Issue #768
-func TestCalendarParamsVariants(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	pageJSON, _ := s.NewPage("test/fileJSON.md")
-	_, _ = pageJSON.ReadFrom(strings.NewReader(pageWithCalendarJSONFrontmatter))
-
-	pageYAML, _ := s.NewPage("test/fileYAML.md")
-	_, _ = pageYAML.ReadFrom(strings.NewReader(pageWithCalendarYAMLFrontmatter))
-
-	pageTOML, _ := s.NewPage("test/fileTOML.md")
-	_, _ = pageTOML.ReadFrom(strings.NewReader(pageWithCalendarTOMLFrontmatter))
-
-	assert.True(t, compareObjects(pageJSON.params, pageYAML.params))
-	assert.True(t, compareObjects(pageJSON.params, pageTOML.params))
-
-}
-
-func TestDifferentFrontMatterVarTypes(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	page, _ := s.NewPage("test/file1.md")
-	_, _ = page.ReadFrom(strings.NewReader(pageWithVariousFrontmatterTypes))
-
-	dateval, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
-	if page.getParamToLower("a_string") != "bar" {
-		t.Errorf("frontmatter not handling strings correctly should be %s, got: %s", "bar", page.getParamToLower("a_string"))
-	}
-	if page.getParamToLower("an_integer") != 1 {
-		t.Errorf("frontmatter not handling ints correctly should be %s, got: %s", "1", page.getParamToLower("an_integer"))
-	}
-	if page.getParamToLower("a_float") != 1.3 {
-		t.Errorf("frontmatter not handling floats correctly should be %f, got: %s", 1.3, page.getParamToLower("a_float"))
-	}
-	if page.getParamToLower("a_bool") != false {
-		t.Errorf("frontmatter not handling bools correctly should be %t, got: %s", false, page.getParamToLower("a_bool"))
-	}
-	if page.getParamToLower("a_date") != dateval {
-		t.Errorf("frontmatter not handling dates correctly should be %s, got: %s", dateval, page.getParamToLower("a_date"))
-	}
-	param := page.getParamToLower("a_table")
-	if param == nil {
-		t.Errorf("frontmatter not handling tables correctly should be type of %v, got: type of %v", reflect.TypeOf(page.params["a_table"]), reflect.TypeOf(param))
-	}
-	if cast.ToStringMap(param)["a_key"] != "a_value" {
-		t.Errorf("frontmatter not handling values inside a table correctly should be %s, got: %s", "a_value", cast.ToStringMap(page.params["a_table"])["a_key"])
-	}
-}
-
-func TestDegenerateInvalidFrontMatterLeadingWhitespace(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	p, _ := s.NewPage("invalid/front/matter/leading/ws")
-	_, err := p.ReadFrom(strings.NewReader(invalidFrontmatterLadingWs))
-	if err != nil {
-		t.Fatalf("Unable to parse front matter given leading whitespace: %s", err)
-	}
-}
-
-func TestSectionEvaluation(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	page, _ := s.NewPage(filepath.FromSlash("blue/file1.md"))
-	page.ReadFrom(strings.NewReader(simplePage))
-	if page.Section() != "blue" {
-		t.Errorf("Section should be %s, got: %s", "blue", page.Section())
-	}
-}
-
-func TestSliceToLower(t *testing.T) {
-	t.Parallel()
-	tests := []struct {
-		value    []string
-		expected []string
-	}{
-		{[]string{"a", "b", "c"}, []string{"a", "b", "c"}},
-		{[]string{"a", "B", "c"}, []string{"a", "b", "c"}},
-		{[]string{"A", "B", "C"}, []string{"a", "b", "c"}},
-	}
-
-	for _, test := range tests {
-		res := helpers.SliceToLower(test.value)
-		for i, val := range res {
-			if val != test.expected[i] {
-				t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
-			}
-		}
-	}
-}
-
 func TestPagePaths(t *testing.T) {
 	t.Parallel()
 
@@ -1254,210 +912,11 @@ func TestPagePaths(t *testing.T) {
 		writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.path)), test.content)
 
 		s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
-		require.Len(t, s.RegularPages, 1)
-
-	}
-}
+		require.Len(t, s.RegularPages(), 1)
 
-var pagesWithPublishedFalse = `---
-title: okay
-published: false
----
-some content
-`
-var pageWithPublishedTrue = `---
-title: okay
-published: true
----
-some content
-`
-
-func TestPublishedFrontMatter(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	p, err := s.newPageFrom(strings.NewReader(pagesWithPublishedFalse), "content/post/broken.md")
-	if err != nil {
-		t.Fatalf("err during parse: %s", err)
-	}
-	if !p.Draft {
-		t.Errorf("expected true, got %t", p.Draft)
-	}
-	p, err = s.newPageFrom(strings.NewReader(pageWithPublishedTrue), "content/post/broken.md")
-	if err != nil {
-		t.Fatalf("err during parse: %s", err)
-	}
-	if p.Draft {
-		t.Errorf("expected false, got %t", p.Draft)
 	}
 }
 
-var pagesDraftTemplate = []string{`---
-title: "okay"
-draft: %t
----
-some content
-`,
-	`+++
-title = "okay"
-draft = %t
-+++
-
-some content
-`,
-}
-
-func TestDraft(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	for _, draft := range []bool{true, false} {
-		for i, templ := range pagesDraftTemplate {
-			pageContent := fmt.Sprintf(templ, draft)
-			p, err := s.newPageFrom(strings.NewReader(pageContent), "content/post/broken.md")
-			if err != nil {
-				t.Fatalf("err during parse: %s", err)
-			}
-			if p.Draft != draft {
-				t.Errorf("[%d] expected %t, got %t", i, draft, p.Draft)
-			}
-		}
-	}
-}
-
-var pagesParamsTemplate = []string{`+++
-title = "okay"
-draft = false
-tags = [ "hugo", "web" ]
-social= [
-  [ "a", "#" ],
-  [ "b", "#" ],
-]
-+++
-some content
-`,
-	`---
-title: "okay"
-draft: false
-tags:
-  - hugo
-  - web
-social:
-  - - a
-    - "#"
-  - - b
-    - "#"
----
-some content
-`,
-	`{
-	"title": "okay",
-	"draft": false,
-	"tags": [ "hugo", "web" ],
-	"social": [
-		[ "a", "#" ],
-		[ "b", "#" ]
-	]
-}
-some content
-`,
-}
-
-func TestPageParams(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	wantedMap := map[string]interface{}{
-		"tags": []string{"hugo", "web"},
-		// Issue #2752
-		"social": []interface{}{
-			[]interface{}{"a", "#"},
-			[]interface{}{"b", "#"},
-		},
-	}
-
-	for i, c := range pagesParamsTemplate {
-		p, err := s.newPageFrom(strings.NewReader(c), "content/post/params.md")
-		require.NoError(t, err, "err during parse", "#%d", i)
-		for key := range wantedMap {
-			assert.Equal(t, wantedMap[key], p.params[key], "#%d", key)
-		}
-	}
-}
-
-func TestTraverse(t *testing.T) {
-	exampleParams := `---
-rating: "5 stars"
-tags:
-  - hugo
-  - web
-social:
-  twitter: "@jxxf"
-  facebook: "https://example.com"
----`
-	t.Parallel()
-	s := newTestSite(t)
-	p, _ := s.newPageFrom(strings.NewReader(exampleParams), "content/post/params.md")
-
-	topLevelKeyValue, _ := p.Param("rating")
-	assert.Equal(t, "5 stars", topLevelKeyValue)
-
-	nestedStringKeyValue, _ := p.Param("social.twitter")
-	assert.Equal(t, "@jxxf", nestedStringKeyValue)
-
-	nonexistentKeyValue, _ := p.Param("doesn't.exist")
-	assert.Nil(t, nonexistentKeyValue)
-}
-
-func TestPageSimpleMethods(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	for i, this := range []struct {
-		assertFunc func(p *Page) bool
-	}{
-		{func(p *Page) bool { return !p.IsNode() }},
-		{func(p *Page) bool { return p.IsPage() }},
-		{func(p *Page) bool { return p.Plain() == "Do Be Do Be Do" }},
-		{func(p *Page) bool { return strings.Join(p.PlainWords(), " ") == "Do Be Do Be Do" }},
-	} {
-
-		p, _ := s.NewPage("Test")
-		p.workContent = []byte("<h1>Do Be Do Be Do</h1>")
-		p.resetContent()
-		if !this.assertFunc(p) {
-			t.Errorf("[%d] Page method error", i)
-		}
-	}
-}
-
-func TestIndexPageSimpleMethods(t *testing.T) {
-	s := newTestSite(t)
-	t.Parallel()
-	for i, this := range []struct {
-		assertFunc func(n *Page) bool
-	}{
-		{func(n *Page) bool { return n.IsNode() }},
-		{func(n *Page) bool { return !n.IsPage() }},
-		{func(n *Page) bool { return n.Scratch() != nil }},
-		{func(n *Page) bool { return n.Hugo().Version() != "" }},
-	} {
-
-		n := s.newHomePage()
-
-		if !this.assertFunc(n) {
-			t.Errorf("[%d] Node method error", i)
-		}
-	}
-}
-
-func TestKind(t *testing.T) {
-	t.Parallel()
-	// Add tests for these constants to make sure they don't change
-	require.Equal(t, "page", KindPage)
-	require.Equal(t, "home", KindHome)
-	require.Equal(t, "section", KindSection)
-	require.Equal(t, "taxonomy", KindTaxonomy)
-	require.Equal(t, "taxonomyTerm", KindTaxonomyTerm)
-
-}
-
 func TestTranslationKey(t *testing.T) {
 	t.Parallel()
 	assert := require.New(t)
@@ -1468,13 +927,13 @@ func TestTranslationKey(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 2)
+	require.Len(t, s.RegularPages(), 2)
 
 	home, _ := s.Info.Home()
 	assert.NotNil(home)
 	assert.Equal("home", home.TranslationKey())
-	assert.Equal("page/k1", s.RegularPages[0].TranslationKey())
-	p2 := s.RegularPages[1]
+	assert.Equal("page/k1", s.RegularPages()[0].TranslationKey())
+	p2 := s.RegularPages()[1]
 
 	assert.Equal("page/sect/simple", p2.TranslationKey())
 
@@ -1490,9 +949,9 @@ func TestChompBOM(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
-	p := s.RegularPages[0]
+	p := s.RegularPages()[0]
 
 	checkPageTitle(t, p, "Simple")
 }
@@ -1554,6 +1013,43 @@ but if you like it, hit :+1: and get subscribed!
 
 }
 
+func TestPageHTMLContent(t *testing.T) {
+	b := newTestSitesBuilder(t)
+	b.WithSimpleConfigFile()
+
+	frontmatter := `---
+title: "HTML Content"
+---
+`
+	b.WithContent("regular.html", frontmatter+`<h1>Hugo</h1>`)
+	b.WithContent("noblackfridayforyou.html", frontmatter+`**Hugo!**`)
+	b.WithContent("manualsummary.html", frontmatter+`
+<p>This is summary</p>
+<!--more-->
+<p>This is the main content.</p>`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent(
+		"public/regular/index.html",
+		"Single: HTML Content|Hello|en|RelPermalink: /regular/|",
+		"Summary: Hugo|Truncated: false")
+
+	b.AssertFileContent(
+		"public/noblackfridayforyou/index.html",
+		"Permalink: http://example.com/noblackfridayforyou/|**Hugo!**|",
+	)
+
+	// https://github.com/gohugoio/hugo/issues/5723
+	b.AssertFileContent(
+		"public/manualsummary/index.html",
+		"Single: HTML Content|Hello|en|RelPermalink: /manualsummary/|",
+		"Summary: \n<p>This is summary</p>\n|Truncated: true",
+		"|<p>This is the main content.</p>|",
+	)
+
+}
+
 // https://github.com/gohugoio/hugo/issues/5381
 func TestPageManualSummary(t *testing.T) {
 	b := newTestSitesBuilder(t)
@@ -1670,17 +1166,6 @@ Content:{{ .Content }}
 
 }
 
-// TODO(bep) this may be useful for other tests.
-func compareObjects(a interface{}, b interface{}) bool {
-	aStr := strings.Split(fmt.Sprintf("%v", a), "")
-	sort.Strings(aStr)
-
-	bStr := strings.Split(fmt.Sprintf("%v", b), "")
-	sort.Strings(bStr)
-
-	return strings.Join(aStr, "") == strings.Join(bStr, "")
-}
-
 func TestShouldBuild(t *testing.T) {
 	t.Parallel()
 	var past = time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC)
@@ -1773,7 +1258,7 @@ tags:
 
 				s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-				require.Len(t, s.RegularPages, 4)
+				require.Len(t, s.RegularPages(), 4)
 
 				pathFunc := func(s string) string {
 					if uglyURLs {
@@ -1804,7 +1289,7 @@ tags:
 
 				}
 
-				p := s.RegularPages[0]
+				p := s.RegularPages()[0]
 				if uglyURLs {
 					require.Equal(t, "/post/test0.dot.html", p.RelPermalink())
 				} else {
@@ -1900,7 +1385,7 @@ Summary: In Chinese, 好 means good.
 	b.CreateSites().Build(BuildCfg{})
 
 	assert.Equal(1, len(b.H.Sites))
-	require.Len(t, b.H.Sites[0].RegularPages, 6)
+	require.Len(t, b.H.Sites[0].RegularPages(), 6)
 
 	b.AssertFileContent("public/p1/index.html", "WordCount: 510\nFuzzyWordCount: 600\nReadingTime: 3\nLen Plain: 2550\nLen PlainWords: 510\nTruncated: false\nLen Summary: 2549\nLen Content: 2557")
 
@@ -1939,15 +1424,3 @@ title: Scratch Me!
 	b.AssertFileContent("public/index.html", "B: bv")
 	b.AssertFileContent("public/scratchme/index.html", "C: cv")
 }
-
-func BenchmarkParsePage(b *testing.B) {
-	s := newTestSite(b)
-	f, _ := os.Open("testdata/redis.cn.md")
-	var buf bytes.Buffer
-	buf.ReadFrom(f)
-	b.ResetTimer()
-	for i := 0; i < b.N; i++ {
-		page, _ := s.NewPage("bench")
-		page.ReadFrom(bytes.NewReader(buf.Bytes()))
-	}
-}
diff --git a/hugolib/page_time_integration_test.go b/hugolib/page_time_integration_test.go
deleted file mode 100644
index f180afa5e2e..00000000000
--- a/hugolib/page_time_integration_test.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"os"
-	"strings"
-	"sync"
-	"testing"
-	"time"
-
-	"github.com/spf13/cast"
-)
-
-const (
-	pageWithInvalidDate = `---
-date: 2010-05-02_15:29:31+08:00
----
-Page With Invalid Date (replace T with _ for RFC 3339)`
-
-	pageWithDateRFC3339 = `---
-date: 2010-05-02T15:29:31+08:00
----
-Page With Date RFC3339`
-
-	pageWithDateRFC3339NoT = `---
-date: 2010-05-02 15:29:31+08:00
----
-Page With Date RFC3339_NO_T`
-
-	pageWithRFC1123 = `---
-date: Sun, 02 May 2010 15:29:31 PST
----
-Page With Date RFC1123`
-
-	pageWithDateRFC1123Z = `---
-date: Sun, 02 May 2010 15:29:31 +0800
----
-Page With Date RFC1123Z`
-
-	pageWithDateRFC822 = `---
-date: 02 May 10 15:29 PST
----
-Page With Date RFC822`
-
-	pageWithDateRFC822Z = `---
-date: 02 May 10 15:29 +0800
----
-Page With Date RFC822Z`
-
-	pageWithDateANSIC = `---
-date: Sun May 2 15:29:31 2010
----
-Page With Date ANSIC`
-
-	pageWithDateUnixDate = `---
-date: Sun May 2 15:29:31 PST 2010
----
-Page With Date UnixDate`
-
-	pageWithDateRubyDate = `---
-date: Sun May 02 15:29:31 +0800 2010
----
-Page With Date RubyDate`
-
-	pageWithDateHugoYearNumeric = `---
-date: 2010-05-02
----
-Page With Date HugoYearNumeric`
-
-	pageWithDateHugoYear = `---
-date: 02 May 2010
----
-Page With Date HugoYear`
-
-	pageWithDateHugoLong = `---
-date: 02 May 2010 15:29 PST
----
-Page With Date HugoLong`
-)
-
-func TestDegenerateDateFrontMatter(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	p, _ := s.newPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date")
-	if p.Date != *new(time.Time) {
-		t.Fatalf("Date should be set to time.Time zero value.  Got: %s", p.Date)
-	}
-}
-
-func TestParsingDateInFrontMatter(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	tests := []struct {
-		buf string
-		dt  string
-	}{
-		{pageWithDateRFC3339, "2010-05-02T15:29:31+08:00"},
-		{pageWithDateRFC3339NoT, "2010-05-02T15:29:31+08:00"},
-		{pageWithDateRFC1123Z, "2010-05-02T15:29:31+08:00"},
-		{pageWithDateRFC822Z, "2010-05-02T15:29:00+08:00"},
-		{pageWithDateANSIC, "2010-05-02T15:29:31Z"},
-		{pageWithDateRubyDate, "2010-05-02T15:29:31+08:00"},
-		{pageWithDateHugoYearNumeric, "2010-05-02T00:00:00Z"},
-		{pageWithDateHugoYear, "2010-05-02T00:00:00Z"},
-	}
-
-	tzShortCodeTests := []struct {
-		buf string
-		dt  string
-	}{
-		{pageWithRFC1123, "2010-05-02T15:29:31-08:00"},
-		{pageWithDateRFC822, "2010-05-02T15:29:00-08:00Z"},
-		{pageWithDateUnixDate, "2010-05-02T15:29:31-08:00"},
-		{pageWithDateHugoLong, "2010-05-02T15:21:00+08:00"},
-	}
-
-	if _, err := time.LoadLocation("PST"); err == nil {
-		tests = append(tests, tzShortCodeTests...)
-	} else {
-		fmt.Fprintf(os.Stderr, "Skipping shortname timezone tests.\n")
-	}
-
-	for _, test := range tests {
-		dt, e := time.Parse(time.RFC3339, test.dt)
-		if e != nil {
-			t.Fatalf("Unable to parse date time (RFC3339) for running the test: %s", e)
-		}
-		p, err := s.newPageFrom(strings.NewReader(test.buf), "page/with/date")
-		if err != nil {
-			t.Fatalf("Expected to be able to parse page.")
-		}
-		if !dt.Equal(p.Date) {
-			t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n      Got: %s. Diff: %s\n internal: %#v\n           %#v", test.buf, dt, p.Date, dt.Sub(p.Date), dt, p.Date)
-		}
-	}
-}
-
-// Temp test https://github.com/gohugoio/hugo/issues/3059
-func TestParsingDateParallel(t *testing.T) {
-	t.Parallel()
-
-	var wg sync.WaitGroup
-
-	for j := 0; j < 100; j++ {
-		wg.Add(1)
-		go func() {
-			defer wg.Done()
-			for j := 0; j < 100; j++ {
-				dateStr := "2010-05-02 15:29:31 +08:00"
-
-				dt, err := time.Parse("2006-01-02 15:04:05 -07:00", dateStr)
-				if err != nil {
-					t.Fatal(err)
-				}
-
-				if dt.Year() != 2010 {
-					t.Fatal("time.Parse: Invalid date:", dt)
-				}
-
-				dt2 := cast.ToTime(dateStr)
-
-				if dt2.Year() != 2010 {
-					t.Fatal("cast.ToTime: Invalid date:", dt2.Year())
-				}
-			}
-		}()
-	}
-	wg.Wait()
-
-}
diff --git a/hugolib/page_unwrap.go b/hugolib/page_unwrap.go
new file mode 100644
index 00000000000..eda6636d162
--- /dev/null
+++ b/hugolib/page_unwrap.go
@@ -0,0 +1,50 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+// pageWrapper is implemented by types that wrap a Page.
+type pageWrapper interface {
+	page() page.Page
+}
+
+// unwrapPage is used in equality checks and similar.
+func unwrapPage(in interface{}) (page.Page, error) {
+	switch v := in.(type) {
+	case *pageState:
+		return v, nil
+	case pageWrapper:
+		return v.page(), nil
+	case page.Page:
+		return v, nil
+	case nil:
+		return nil, nil
+	default:
+		return nil, errors.Errorf("unwrapPage: %T not supported", in)
+	}
+}
+
+func mustUnwrapPage(in interface{}) page.Page {
+	p, err := unwrapPage(in)
+	if err != nil {
+		panic(err)
+	}
+
+	return p
+}
diff --git a/hugolib/path_separators_test.go b/hugolib/page_unwrap_test.go
similarity index 57%
rename from hugolib/path_separators_test.go
rename to hugolib/page_unwrap_test.go
index 0d769e65013..23747dce83b 100644
--- a/hugolib/path_separators_test.go
+++ b/hugolib/page_unwrap_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,25 +14,24 @@
 package hugolib
 
 import (
-	"path/filepath"
-	"strings"
 	"testing"
+
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/stretchr/testify/require"
 )
 
-var simplePageYAML = `---
-contenttype: ""
----
-Sample Text
-`
+func TestUnwrapPage(t *testing.T) {
+	assert := require.New(t)
+
+	p := &pageState{}
 
-func TestDegenerateMissingFolderInPageFilename(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	p, err := s.newPageFrom(strings.NewReader(simplePageYAML), filepath.Join("foobar"))
+	assert.Equal(p, mustUnwrap(newPageForShortcode(p)))
+}
+
+func mustUnwrap(v interface{}) page.Page {
+	p, err := unwrapPage(v)
 	if err != nil {
-		t.Fatalf("Error in NewPageFrom")
-	}
-	if p.Section() != "" {
-		t.Fatalf("No section should be set for a file path: foobar")
+		panic(err)
 	}
+	return p
 }
diff --git a/hugolib/page_without_content.go b/hugolib/page_without_content.go
deleted file mode 100644
index 3659efaeaf4..00000000000
--- a/hugolib/page_without_content.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"html/template"
-)
-
-// PageWithoutContent is sent to the shortcodes. They cannot access the content
-// they're a part of. It would cause an infinite regress.
-//
-// Go doesn't support virtual methods, so this careful dance is currently (I think)
-// the best we can do.
-type PageWithoutContent struct {
-	*Page
-}
-
-// Content returns an empty string.
-func (p *PageWithoutContent) Content() (interface{}, error) {
-	return "", nil
-}
-
-// Truncated always returns false.
-func (p *PageWithoutContent) Truncated() bool {
-	return false
-}
-
-// Summary returns an empty string.
-func (p *PageWithoutContent) Summary() template.HTML {
-	return ""
-}
-
-// WordCount always returns 0.
-func (p *PageWithoutContent) WordCount() int {
-	return 0
-}
-
-// ReadingTime always returns 0.
-func (p *PageWithoutContent) ReadingTime() int {
-	return 0
-}
-
-// FuzzyWordCount always returns 0.
-func (p *PageWithoutContent) FuzzyWordCount() int {
-	return 0
-}
-
-// Plain returns an empty string.
-func (p *PageWithoutContent) Plain() string {
-	return ""
-}
-
-// PlainWords returns an empty string slice.
-func (p *PageWithoutContent) PlainWords() []string {
-	return []string{}
-}
diff --git a/hugolib/pagebundler.go b/hugolib/pagebundler.go
index 62ef2b52bc3..e2e95ab4c54 100644
--- a/hugolib/pagebundler.go
+++ b/hugolib/pagebundler.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@ import (
 	"context"
 	"fmt"
 	"math"
+	"path/filepath"
 	"runtime"
 
 	_errors "github.com/pkg/errors"
@@ -38,12 +39,12 @@ type siteContentProcessor struct {
 	fileSinglesChan chan *fileInfo
 
 	// These assets should be just copied to destination.
-	fileAssetsChan chan []pathLangFile
+	fileAssetsChan chan pathLangFile
 
 	numWorkers int
 
 	// The output Pages
-	pagesChan chan *Page
+	pagesChan chan *pageState
 
 	// Used for partial rebuilds (aka. live reload)
 	// Will signal replacement of pages in the site collection.
@@ -64,9 +65,9 @@ func (s *siteContentProcessor) processSingle(fi *fileInfo) {
 	}
 }
 
-func (s *siteContentProcessor) processAssets(assets []pathLangFile) {
+func (s *siteContentProcessor) processAsset(asset pathLangFile) {
 	select {
-	case s.fileAssetsChan <- assets:
+	case s.fileAssetsChan <- asset:
 	case <-s.ctx.Done():
 	}
 }
@@ -77,7 +78,7 @@ func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *s
 		numWorkers = n
 	}
 
-	numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.owner.Sites))))
+	numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.h.Sites))))
 
 	return &siteContentProcessor{
 		ctx:             ctx,
@@ -86,9 +87,9 @@ func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *s
 		handleContent:   newHandlerChain(s),
 		fileBundlesChan: make(chan *bundleDir, numWorkers),
 		fileSinglesChan: make(chan *fileInfo, numWorkers),
-		fileAssetsChan:  make(chan []pathLangFile, numWorkers),
+		fileAssetsChan:  make(chan pathLangFile, numWorkers),
 		numWorkers:      numWorkers,
-		pagesChan:       make(chan *Page, numWorkers),
+		pagesChan:       make(chan *pageState, numWorkers),
 	}
 }
 
@@ -140,22 +141,20 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
 		g2.Go(func() error {
 			for {
 				select {
-				case files, ok := <-s.fileAssetsChan:
+				case file, ok := <-s.fileAssetsChan:
 					if !ok {
 						return nil
 					}
-					for _, file := range files {
-						f, err := s.site.BaseFs.Content.Fs.Open(file.Filename())
-						if err != nil {
-							return _errors.Wrap(err, "failed to open assets file")
-						}
-						err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, file.Path(), f)
-						f.Close()
-						if err != nil {
-							return err
-						}
+					f, err := s.site.BaseFs.Content.Fs.Open(file.Filename())
+					if err != nil {
+						return _errors.Wrap(err, "failed to open assets file")
+					}
+					filename := filepath.Join(s.site.GetTargetLanguageBasePath(), file.Path())
+					err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, filename, f)
+					f.Close()
+					if err != nil {
+						return err
 					}
-
 				case <-ctx.Done():
 					return ctx.Err()
 				}
@@ -192,8 +191,6 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
 		return err
 	}
 
-	s.site.rawAllPages.sort()
-
 	return nil
 
 }
diff --git a/hugolib/pagebundler_capture.go b/hugolib/pagebundler_capture.go
index 446d3b0c741..17a4b865a4d 100644
--- a/hugolib/pagebundler_capture.go
+++ b/hugolib/pagebundler_capture.go
@@ -116,7 +116,7 @@ func newCapturer(
 // these channels.
 type captureResultHandler interface {
 	handleSingles(fis ...*fileInfo)
-	handleCopyFiles(fis ...pathLangFile)
+	handleCopyFile(fi pathLangFile)
 	captureBundlesHandler
 }
 
@@ -141,10 +141,10 @@ func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
 	}
 }
 
-func (c *captureResultHandlerChain) handleCopyFiles(files ...pathLangFile) {
+func (c *captureResultHandlerChain) handleCopyFile(file pathLangFile) {
 	for _, h := range c.handlers {
 		if hh, ok := h.(captureResultHandler); ok {
-			hh.handleCopyFiles(files...)
+			hh.handleCopyFile(file)
 		}
 	}
 }
@@ -444,7 +444,7 @@ func (c *capturer) handleNonBundle(
 				}
 				c.handler.handleSingles(f)
 			} else {
-				c.handler.handleCopyFiles(fi)
+				c.handler.handleCopyFile(fi)
 			}
 		}
 	}
@@ -457,7 +457,7 @@ func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
 		c.handler.handleSingles(fi)
 	} else {
 		// These do not currently need any further processing.
-		c.handler.handleCopyFiles(fi)
+		c.handler.handleCopyFile(fi)
 	}
 }
 
diff --git a/hugolib/pagebundler_capture_test.go b/hugolib/pagebundler_capture_test.go
index d6128352c0a..b6d9822af86 100644
--- a/hugolib/pagebundler_capture_test.go
+++ b/hugolib/pagebundler_capture_test.go
@@ -64,12 +64,10 @@ func (s *storeFilenames) handleBundles(d *bundleDirs) {
 	s.dirKeys = append(s.dirKeys, keys...)
 }
 
-func (s *storeFilenames) handleCopyFiles(files ...pathLangFile) {
+func (s *storeFilenames) handleCopyFile(file pathLangFile) {
 	s.Lock()
 	defer s.Unlock()
-	for _, file := range files {
-		s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
-	}
+	s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
 }
 
 func (s *storeFilenames) sortedStr() string {
@@ -224,9 +222,9 @@ C:
 
 type noOpFileStore int
 
-func (noOpFileStore) handleSingles(fis ...*fileInfo)        {}
-func (noOpFileStore) handleBundles(b *bundleDirs)           {}
-func (noOpFileStore) handleCopyFiles(files ...pathLangFile) {}
+func (noOpFileStore) handleSingles(fis ...*fileInfo)   {}
+func (noOpFileStore) handleBundles(b *bundleDirs)      {}
+func (noOpFileStore) handleCopyFile(file pathLangFile) {}
 
 func BenchmarkPageBundlerCapture(b *testing.B) {
 	capturers := make([]*capturer, b.N)
diff --git a/hugolib/pagebundler_handlers.go b/hugolib/pagebundler_handlers.go
index 2df1f87656f..c217b5e0944 100644
--- a/hugolib/pagebundler_handlers.go
+++ b/hugolib/pagebundler_handlers.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,11 +17,11 @@ import (
 	"errors"
 	"fmt"
 	"path/filepath"
-	"sort"
+
+	"github.com/gohugoio/hugo/common/hugio"
 
 	"strings"
 
-	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/resources"
 	"github.com/gohugoio/hugo/resources/resource"
 )
@@ -50,13 +50,9 @@ func init() {
 func newHandlerChain(s *Site) contentHandler {
 	c := &contentHandlers{s: s}
 
-	contentFlow := c.parsePage(c.processFirstMatch(
-		// Handles all files with a content file extension. See above.
+	contentFlow := c.parsePage(
 		c.handlePageContent(),
-
-		// Every HTML file without front matter will be passed on to this handler.
-		c.handleHTMLContent(),
-	))
+	)
 
 	c.rootHandler = c.processFirstMatch(
 		contentFlow,
@@ -93,12 +89,12 @@ func (c *contentHandlers) processFirstMatch(handlers ...contentHandler) func(ctx
 
 type handlerContext struct {
 	// These are the pages stored in Site.
-	pages chan<- *Page
+	pages chan<- *pageState
 
 	doNotAddToSiteCollections bool
 
-	currentPage *Page
-	parentPage  *Page
+	currentPage *pageState
+	parentPage  *pageState
 
 	bundle *bundleDir
 
@@ -110,10 +106,7 @@ type handlerContext struct {
 
 func (c *handlerContext) ext() string {
 	if c.currentPage != nil {
-		if c.currentPage.Markup != "" {
-			return c.currentPage.Markup
-		}
-		return c.currentPage.Ext()
+		return c.currentPage.contentMarkupType()
 	}
 
 	if c.bundle != nil {
@@ -175,9 +168,9 @@ func (c *handlerContext) isContentFile() bool {
 
 type (
 	handlerResult struct {
-		err      error
-		handled  bool
-		resource resource.Resource
+		err     error
+		handled bool
+		result  interface{}
 	}
 
 	contentHandler func(ctx *handlerContext) handlerResult
@@ -196,27 +189,27 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
 		result := handlerResult{handled: true}
 		fi := ctx.file()
 
-		f, err := fi.Open()
-		if err != nil {
-			return handlerResult{err: fmt.Errorf("(%s) failed to open content file: %s", fi.Filename(), err)}
+		content := func() (hugio.ReadSeekCloser, error) {
+			f, err := fi.Open()
+			if err != nil {
+				return nil, fmt.Errorf("failed to open content file %q: %s", fi.Filename(), err)
+			}
+			return f, nil
 		}
-		defer f.Close()
-
-		p := c.s.newPageFromFile(fi)
 
-		_, err = p.ReadFrom(f)
+		ps, err := newPageWithContent(fi, c.s, content)
 		if err != nil {
 			return handlerResult{err: err}
 		}
 
-		if !p.shouldBuild() {
+		if !c.s.shouldBuild(ps) {
 			if !ctx.doNotAddToSiteCollections {
-				ctx.pages <- p
+				ctx.pages <- ps
 			}
 			return result
 		}
 
-		ctx.currentPage = p
+		ctx.currentPage = ps
 
 		if ctx.bundle != nil {
 			// Add the bundled files
@@ -226,39 +219,20 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
 				if res.err != nil {
 					return res
 				}
-				if res.resource != nil {
-					if pageResource, ok := res.resource.(*Page); ok {
-						pageResource.resourcePath = filepath.ToSlash(childCtx.target)
-						pageResource.parent = p
+				if res.result != nil {
+					switch resv := res.result.(type) {
+					case *pageState:
+						resv.m.resourcePath = filepath.ToSlash(childCtx.target)
+						resv.parent = ps
+						ps.addResources(resv)
+					case resource.Resource:
+						ps.addResources(resv)
+
+					default:
+						panic("Unknown type")
 					}
-					p.Resources = append(p.Resources, res.resource)
-				}
-			}
-
-			sort.SliceStable(p.Resources, func(i, j int) bool {
-				if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() {
-					return true
-				}
-
-				p1, ok1 := p.Resources[i].(*Page)
-				p2, ok2 := p.Resources[j].(*Page)
-
-				if ok1 != ok2 {
-					return ok2
-				}
-
-				if ok1 {
-					return defaultPageSort(p1, p2)
 				}
-
-				return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink()
-			})
-
-			// Assign metadata from front matter if set
-			if len(p.resourcesMetadata) > 0 {
-				resources.AssignMetadata(p.resourcesMetadata, p.Resources...)
 			}
-
 		}
 
 		return h(ctx)
@@ -267,58 +241,47 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
 
 func (c *contentHandlers) handlePageContent() contentHandler {
 	return func(ctx *handlerContext) handlerResult {
-		if ctx.supports("html", "htm") {
-			return notHandled
-		}
-
 		p := ctx.currentPage
 
-		p.workContent = p.renderContent(p.workContent)
-
-		tmpContent, tmpTableOfContents := helpers.ExtractTOC(p.workContent)
-		p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
-		p.workContent = tmpContent
-
 		if !ctx.doNotAddToSiteCollections {
 			ctx.pages <- p
 		}
 
-		return handlerResult{handled: true, resource: p}
+		return handlerResult{handled: true, result: p}
 	}
 }
 
-func (c *contentHandlers) handleHTMLContent() contentHandler {
+func (c *contentHandlers) createResource() contentHandler {
 	return func(ctx *handlerContext) handlerResult {
-		if !ctx.supports("html", "htm") {
+		if ctx.parentPage == nil {
 			return notHandled
 		}
 
-		p := ctx.currentPage
-
-		if !ctx.doNotAddToSiteCollections {
-			ctx.pages <- p
-		}
-
-		return handlerResult{handled: true, resource: p}
-	}
-}
+		// TODO(bep) consolidate with multihost logic + clean up
+		outputFormats := ctx.parentPage.m.outputFormats()
+		seen := make(map[string]bool)
+		var targetBasePaths []string
+		// Make sure bundled resources are published to all of the output formats'
+		// sub paths.
+		for _, f := range outputFormats {
+			p := f.Path
+			if seen[p] {
+				continue
+			}
+			seen[p] = true
+			targetBasePaths = append(targetBasePaths, p)
 
-func (c *contentHandlers) createResource() contentHandler {
-	return func(ctx *handlerContext) handlerResult {
-		if ctx.parentPage == nil {
-			return notHandled
 		}
 
 		resource, err := c.s.ResourceSpec.New(
 			resources.ResourceSourceDescriptor{
-				TargetPathBuilder: ctx.parentPage.subResourceTargetPathFactory,
+				TargetPaths:       ctx.parentPage.getTargetPaths,
 				SourceFile:        ctx.source,
 				RelTargetFilename: ctx.target,
-				URLBase:           c.s.GetURLLanguageBasePath(),
-				TargetBasePaths:   []string{c.s.GetTargetLanguageBasePath()},
+				TargetBasePaths:   targetBasePaths,
 			})
 
-		return handlerResult{err: err, handled: true, resource: resource}
+		return handlerResult{err: err, handled: true, result: resource}
 	}
 }
 
diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go
index ab047205911..83b8fac5255 100644
--- a/hugolib/pagebundler_test.go
+++ b/hugolib/pagebundler_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,12 +14,15 @@
 package hugolib
 
 import (
-	"github.com/gohugoio/hugo/common/loggers"
-
 	"os"
+	"path"
 	"runtime"
+	"strings"
 	"testing"
 
+	"github.com/gohugoio/hugo/common/loggers"
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/helpers"
 
 	"io"
@@ -47,7 +50,11 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 	for _, baseURLPath := range []string{"", "/hugo"} {
 		for _, canonify := range []bool{false, true} {
 			for _, ugly := range []bool{false, true} {
-				t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPath),
+				baseURLPathId := baseURLPath
+				if baseURLPathId == "" {
+					baseURLPathId = "NONE"
+				}
+				t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
 					func(t *testing.T) {
 						baseURL := baseBaseURL + baseURLPath
 						relURLBase := baseURLPath
@@ -70,9 +77,10 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 
 						cfg.Set("outputFormats", map[string]interface{}{
 							"CUSTOMO": map[string]interface{}{
-								"mediaType": media.HTMLType,
-								"baseName":  "cindex",
-								"path":      "cpath",
+								"mediaType":     media.HTMLType,
+								"baseName":      "cindex",
+								"path":          "cpath",
+								"permalinkable": true,
 							},
 						})
 
@@ -84,70 +92,92 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 
 						cfg.Set("uglyURLs", ugly)
 
-						s := buildSingleSite(t, deps.DepsCfg{Logger: loggers.NewWarningLogger(), Fs: fs, Cfg: cfg}, BuildCfg{})
+						s := buildSingleSite(t, deps.DepsCfg{Logger: loggers.NewErrorLogger(), Fs: fs, Cfg: cfg}, BuildCfg{})
 
 						th := testHelper{s.Cfg, s.Fs, t}
 
-						assert.Len(s.RegularPages, 8)
+						assert.Len(s.RegularPages(), 8)
 
-						singlePage := s.getPage(KindPage, "a/1.md")
+						singlePage := s.getPage(page.KindPage, "a/1.md")
 						assert.Equal("", singlePage.BundleType())
 
 						assert.NotNil(singlePage)
 						assert.Equal(singlePage, s.getPage("page", "a/1"))
 						assert.Equal(singlePage, s.getPage("page", "1"))
 
-						assert.Contains(singlePage.content(), "TheContent")
+						assert.Contains(content(singlePage), "TheContent")
 
-						if ugly {
-							assert.Equal(relURLBase+"/a/1.html", singlePage.RelPermalink())
-							th.assertFileContent(filepath.FromSlash("/work/public/a/1.html"), "TheContent")
+						relFilename := func(basePath, outBase string) (string, string) {
+							rel := basePath
+							if ugly {
+								rel = strings.TrimSuffix(basePath, "/") + ".html"
+							}
 
-						} else {
-							assert.Equal(relURLBase+"/a/1/", singlePage.RelPermalink())
-							th.assertFileContent(filepath.FromSlash("/work/public/a/1/index.html"), "TheContent")
+							var filename string
+							if !ugly {
+								filename = path.Join(basePath, outBase)
+							} else {
+								filename = rel
+							}
+
+							rel = fmt.Sprintf("%s%s", relURLBase, rel)
+
+							return rel, filename
 						}
 
+						// Check both output formats
+						rel, filename := relFilename("/a/1/", "index.html")
+						th.assertFileContent(filepath.Join("/work/public", filename),
+							"TheContent",
+							"Single RelPermalink: "+rel,
+						)
+
+						rel, filename = relFilename("/cpath/a/1/", "cindex.html")
+
+						th.assertFileContent(filepath.Join("/work/public", filename),
+							"TheContent",
+							"Single RelPermalink: "+rel,
+						)
+
 						th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content")
 
 						// This should be just copied to destination.
 						th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content")
 
-						leafBundle1 := s.getPage(KindPage, "b/my-bundle/index.md")
+						leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md")
 						assert.NotNil(leafBundle1)
 						assert.Equal("leaf", leafBundle1.BundleType())
 						assert.Equal("b", leafBundle1.Section())
-						sectionB := s.getPage(KindSection, "b")
+						sectionB := s.getPage(page.KindSection, "b")
 						assert.NotNil(sectionB)
 						home, _ := s.Info.Home()
 						assert.Equal("branch", home.BundleType())
 
 						// This is a root bundle and should live in the "home section"
 						// See https://github.com/gohugoio/hugo/issues/4332
-						rootBundle := s.getPage(KindPage, "root")
+						rootBundle := s.getPage(page.KindPage, "root")
 						assert.NotNil(rootBundle)
 						assert.True(rootBundle.Parent().IsHome())
-						if ugly {
-							assert.Equal(relURLBase+"/root.html", rootBundle.RelPermalink())
-						} else {
-							assert.Equal(relURLBase+"/root/", rootBundle.RelPermalink())
+						if !ugly {
+							th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/")
+							th.assertFileContent(filepath.FromSlash("/work/public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/")
 						}
 
-						leafBundle2 := s.getPage(KindPage, "a/b/index.md")
+						leafBundle2 := s.getPage(page.KindPage, "a/b/index.md")
 						assert.NotNil(leafBundle2)
-						unicodeBundle := s.getPage(KindPage, "c/bundle/index.md")
+						unicodeBundle := s.getPage(page.KindPage, "c/bundle/index.md")
 						assert.NotNil(unicodeBundle)
 
-						pageResources := leafBundle1.Resources.ByType(pageResourceType)
+						pageResources := leafBundle1.Resources().ByType(pageResourceType)
 						assert.Len(pageResources, 2)
-						firstPage := pageResources[0].(*Page)
-						secondPage := pageResources[1].(*Page)
-						assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle())
-						assert.Contains(firstPage.content(), "TheContent")
-						assert.Equal(6, len(leafBundle1.Resources))
+						firstPage := pageResources[0].(page.Page)
+						secondPage := pageResources[1].(page.Page)
+						assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.File().Filename(), secondPage.File().Filename())
+						assert.Contains(content(firstPage), "TheContent")
+						assert.Equal(6, len(leafBundle1.Resources()))
 
 						// Verify shortcode in bundled page
-						assert.Contains(secondPage.content(), filepath.FromSlash("MyShort in b/my-bundle/2.md"))
+						assert.Contains(content(secondPage), filepath.FromSlash("MyShort in b/my-bundle/2.md"))
 
 						// https://github.com/gohugoio/hugo/issues/4582
 						assert.Equal(leafBundle1, firstPage.Parent())
@@ -157,20 +187,10 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 						assert.Equal(secondPage, pageResources.GetMatch("2*"))
 						assert.Nil(pageResources.GetMatch("doesnotexist*"))
 
-						imageResources := leafBundle1.Resources.ByType("image")
+						imageResources := leafBundle1.Resources().ByType("image")
 						assert.Equal(3, len(imageResources))
-						image := imageResources[0]
-
-						altFormat := leafBundle1.OutputFormats().Get("CUSTOMO")
-						assert.NotNil(altFormat)
-
-						assert.Equal(baseURL+"/2017/pageslug/c/logo.png", image.Permalink())
 
-						th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
-						th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
-
-						// Custom media type defined in site config.
-						assert.Len(leafBundle1.Resources.ByType("bepsays"), 1)
+						assert.NotNil(leafBundle1.OutputFormats().Get("CUSTOMO"))
 
 						relPermalinker := func(s string) string {
 							return fmt.Sprintf(s, relURLBase)
@@ -180,12 +200,33 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 							return fmt.Sprintf(s, baseURL)
 						}
 
-						if permalinker == nil {
+						if ugly {
+							th.assertFileContent("/work/public/2017/pageslug.html",
+								relPermalinker("Single RelPermalink: %s/2017/pageslug.html"),
+								permalinker("Single Permalink: %s/2017/pageslug.html"),
+								relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+								permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
+						} else {
+							th.assertFileContent("/work/public/2017/pageslug/index.html",
+								relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+								permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
+
+							th.assertFileContent("/work/public/cpath/2017/pageslug/cindex.html",
+								relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"),
+								relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"),
+								relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"),
+								permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"),
+							)
 						}
 
+						th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
+						th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
+						th.assertFileNotExist("/work/public/cpath/cpath/2017/pageslug/c/logo.png")
+
+						// Custom media type defined in site config.
+						assert.Len(leafBundle1.Resources().ByType("bepsays"), 1)
+
 						if ugly {
-							assert.Equal(relURLBase+"/2017/pageslug.html", leafBundle1.RelPermalink())
-							assert.Equal(baseURL+"/2017/pageslug.html", leafBundle1.Permalink())
 							th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"),
 								"TheContent",
 								relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
@@ -202,23 +243,15 @@ func TestPageBundlerSiteRegular(t *testing.T) {
 							)
 							th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")
 
-							assert.Equal(relURLBase+"/a/b.html", leafBundle2.RelPermalink())
-
 							// 은행
-							assert.Equal(relURLBase+"/c/%EC%9D%80%ED%96%89.html", unicodeBundle.RelPermalink())
-							th.assertFileContent(filepath.FromSlash("/work/public/c/은행.html"), "Content for 은행")
 							th.assertFileContent(filepath.FromSlash("/work/public/c/은행/logo-은행.png"), "은행 PNG")
 
 						} else {
-							assert.Equal(relURLBase+"/2017/pageslug/", leafBundle1.RelPermalink())
-							assert.Equal(baseURL+"/2017/pageslug/", leafBundle1.Permalink())
 							th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent")
 							th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent")
 							th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "Single Title")
 							th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single Title")
 
-							assert.Equal(relURLBase+"/a/b/", leafBundle2.RelPermalink())
-
 						}
 
 					})
@@ -249,11 +282,11 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
 
 				s := sites.Sites[0]
 
-				assert.Equal(8, len(s.RegularPages))
-				assert.Equal(16, len(s.Pages))
-				assert.Equal(31, len(s.AllPages))
+				assert.Equal(8, len(s.RegularPages()))
+				assert.Equal(16, len(s.Pages()))
+				assert.Equal(31, len(s.AllPages()))
 
-				bundleWithSubPath := s.getPage(KindPage, "lb/index")
+				bundleWithSubPath := s.getPage(page.KindPage, "lb/index")
 				assert.NotNil(bundleWithSubPath)
 
 				// See https://github.com/gohugoio/hugo/issues/4312
@@ -267,30 +300,30 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
 				// and probably also just b (aka "my-bundle")
 				// These may also be translated, so we also need to test that.
 				//  "bf", "my-bf-bundle", "index.md + nn
-				bfBundle := s.getPage(KindPage, "bf/my-bf-bundle/index")
+				bfBundle := s.getPage(page.KindPage, "bf/my-bf-bundle/index")
 				assert.NotNil(bfBundle)
-				assert.Equal("en", bfBundle.Lang())
-				assert.Equal(bfBundle, s.getPage(KindPage, "bf/my-bf-bundle/index.md"))
-				assert.Equal(bfBundle, s.getPage(KindPage, "bf/my-bf-bundle"))
-				assert.Equal(bfBundle, s.getPage(KindPage, "my-bf-bundle"))
+				assert.Equal("en", bfBundle.Language().Lang)
+				assert.Equal(bfBundle, s.getPage(page.KindPage, "bf/my-bf-bundle/index.md"))
+				assert.Equal(bfBundle, s.getPage(page.KindPage, "bf/my-bf-bundle"))
+				assert.Equal(bfBundle, s.getPage(page.KindPage, "my-bf-bundle"))
 
 				nnSite := sites.Sites[1]
-				assert.Equal(7, len(nnSite.RegularPages))
+				assert.Equal(7, len(nnSite.RegularPages()))
 
-				bfBundleNN := nnSite.getPage(KindPage, "bf/my-bf-bundle/index")
+				bfBundleNN := nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index")
 				assert.NotNil(bfBundleNN)
-				assert.Equal("nn", bfBundleNN.Lang())
-				assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "bf/my-bf-bundle/index.nn.md"))
-				assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "bf/my-bf-bundle"))
-				assert.Equal(bfBundleNN, nnSite.getPage(KindPage, "my-bf-bundle"))
+				assert.Equal("nn", bfBundleNN.Language().Lang)
+				assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index.nn.md"))
+				assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "bf/my-bf-bundle"))
+				assert.Equal(bfBundleNN, nnSite.getPage(page.KindPage, "my-bf-bundle"))
 
 				// See https://github.com/gohugoio/hugo/issues/4295
 				// Every resource should have its Name prefixed with its base folder.
-				cBundleResources := bundleWithSubPath.Resources.Match("c/**")
+				cBundleResources := bundleWithSubPath.Resources().Match("c/**")
 				assert.Equal(4, len(cBundleResources))
-				bundlePage := bundleWithSubPath.Resources.GetMatch("c/page*")
+				bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*")
 				assert.NotNil(bundlePage)
-				assert.IsType(&Page{}, bundlePage)
+				assert.IsType(&pageState{}, bundlePage)
 
 			})
 	}
@@ -329,15 +362,15 @@ func TestMultilingualDisableLanguage(t *testing.T) {
 
 	s := sites.Sites[0]
 
-	assert.Equal(8, len(s.RegularPages))
-	assert.Equal(16, len(s.Pages))
+	assert.Equal(8, len(s.RegularPages()))
+	assert.Equal(16, len(s.Pages()))
 	// No nn pages
-	assert.Equal(16, len(s.AllPages))
+	assert.Equal(16, len(s.AllPages()))
 	for _, p := range s.rawAllPages {
-		assert.True(p.Lang() != "nn")
+		assert.True(p.Language().Lang != "nn")
 	}
-	for _, p := range s.AllPages {
-		assert.True(p.Lang() != "nn")
+	for _, p := range s.AllPages() {
+		assert.True(p.Language().Lang != "nn")
 	}
 
 }
@@ -358,11 +391,11 @@ func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
 
 	th := testHelper{s.Cfg, s.Fs, t}
 
-	assert.Equal(7, len(s.RegularPages))
-	a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md")
+	assert.Equal(7, len(s.RegularPages()))
+	a1Bundle := s.getPage(page.KindPage, "symbolic2/a1/index.md")
 	assert.NotNil(a1Bundle)
-	assert.Equal(2, len(a1Bundle.Resources))
-	assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType)))
+	assert.Equal(2, len(a1Bundle.Resources()))
+	assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType)))
 
 	th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent")
 	th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent")
@@ -416,28 +449,27 @@ HEADLESS {{< myShort >}}
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	assert.Equal(1, len(s.RegularPages))
+	assert.Equal(1, len(s.RegularPages()))
 	assert.Equal(1, len(s.headlessPages))
 
-	regular := s.getPage(KindPage, "a/index")
+	regular := s.getPage(page.KindPage, "a/index")
 	assert.Equal("/a/s1/", regular.RelPermalink())
 
-	headless := s.getPage(KindPage, "b/index")
+	headless := s.getPage(page.KindPage, "b/index")
 	assert.NotNil(headless)
-	assert.True(headless.headless)
 	assert.Equal("Headless Bundle in Topless Bar", headless.Title())
 	assert.Equal("", headless.RelPermalink())
 	assert.Equal("", headless.Permalink())
-	assert.Contains(headless.content(), "HEADLESS SHORTCODE")
+	assert.Contains(content(headless), "HEADLESS SHORTCODE")
 
-	headlessResources := headless.Resources
+	headlessResources := headless.Resources()
 	assert.Equal(3, len(headlessResources))
 	assert.Equal(2, len(headlessResources.Match("l*")))
 	pageResource := headlessResources.GetMatch("p*")
 	assert.NotNil(pageResource)
-	assert.IsType(&Page{}, pageResource)
-	p := pageResource.(*Page)
-	assert.Contains(p.content(), "SHORTCODE")
+	assert.IsType(&pageState{}, pageResource)
+	p := pageResource.(page.Page)
+	assert.Contains(content(p), "SHORTCODE")
 	assert.Equal("p1.md", p.Name())
 
 	th := testHelper{s.Cfg, s.Fs, t}
@@ -451,6 +483,56 @@ HEADLESS {{< myShort >}}
 
 }
 
+func TestHeadlessBundleMultiSite(t *testing.T) {
+	assert := require.New(t)
+	b := newTestSitesBuilder(t)
+	b.WithConfigFile("toml", `
+
+baseURL = "http://example.com/"
+
+defaultContentLanguage = "en"
+
+[languages]
+[languages.en]
+weight = 10
+contentDir = "content/en"
+[languages.nn]
+weight = 20
+contentDir = "content/nn"
+
+
+`)
+
+	b.WithContent("en/mybundle/index.md", `
+---
+headless: true
+---
+
+`)
+
+	b.WithContent("nn/mybundle/index.md", `
+---
+headless: true
+---
+
+`)
+
+	b.WithContent("en/mybundle/data.yaml", `data en`)
+	b.WithContent("en/mybundle/forms.yaml", `forms en`)
+	b.WithContent("nn/mybundle/data.yaml", `data nn`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn")
+	b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en")
+	b.AssertFileContent("public/mybundle/data.yaml", "data en")
+	b.AssertFileContent("public/mybundle/forms.yaml", "forms en")
+
+	assert.False(b.CheckExists("public/nn/nn/mybundle/data.yaml"))
+	assert.False(b.CheckExists("public/en/mybundle/data.yaml"))
+
+}
+
 func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) {
 	cfg, fs := newTestCfg()
 	assert := require.New(t)
@@ -512,6 +594,8 @@ TheContent.
 
 	singleLayout := `
 Single Title: {{ .Title }}
+Single RelPermalink: {{ .RelPermalink }}
+Single Permalink: {{ .Permalink }}
 Content: {{ .Content }}
 {{ $sunset := .Resources.GetMatch "my-sunset-1*" }}
 {{ with $sunset }}
@@ -532,7 +616,7 @@ Thumb RelPermalink: {{ $thumb.RelPermalink }}
 `
 
 	myShort := `
-MyShort in {{ .Page.Path }}:
+MyShort in {{ .Page.File.Path }}:
 {{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }}
 {{ with $sunset }}
 Short Sunset RelPermalink: {{ .RelPermalink }}
@@ -599,6 +683,7 @@ Content for 은행.
 	assert.NoError(err)
 
 	_, err = io.Copy(out, src)
+	assert.NoError(err)
 	out.Close()
 	src.Seek(0, 0)
 	_, err = io.Copy(out2, src)
diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go
index 78325344b60..f62ea0905dc 100644
--- a/hugolib/pagecollections.go
+++ b/hugolib/pagecollections.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,43 +18,65 @@ import (
 	"path"
 	"path/filepath"
 	"strings"
+	"sync"
+
+	"github.com/pkg/errors"
 
 	"github.com/gohugoio/hugo/cache"
 	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/page"
 )
 
+// Used in the page cache to mark more than one hit for a given key.
+var ambiguityFlag = &pageState{}
+
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
-	// Includes only pages of all types, and only pages in the current language.
-	Pages Pages
 
-	// Includes all pages in all languages, including the current one.
-	// Includes pages of all types.
-	AllPages Pages
+	// Includes absolutely all pages (of all types), including drafts etc.
+	rawAllPages pageStatePages
 
-	// A convenience cache for the traditional index types, taxonomies, home page etc.
-	// This is for the current language only.
-	indexPages Pages
+	// rawAllPages plus additional pages created during the build process.
+	workAllPages pageStatePages
 
-	// A convenience cache for the regular pages.
-	// This is for the current language only.
-	RegularPages Pages
+	// Includes headless bundles, i.e. bundles that produce no output for their content page.
+	headlessPages pageStatePages
 
-	// A convenience cache for the all the regular pages.
-	AllRegularPages Pages
+	// Lazy initialized page collections
+	pages           *lazyPagesFactory
+	regularPages    *lazyPagesFactory
+	allPages        *lazyPagesFactory
+	allRegularPages *lazyPagesFactory
 
-	// Includes absolute all pages (of all types), including drafts etc.
-	rawAllPages Pages
+	// The index for .Site.GetPage etc.
+	pageIndex *cache.Lazy
+}
 
-	// Includes headless bundles, i.e. bundles that produce no output for its content page.
-	headlessPages Pages
+// Pages returns all pages.
+// This is for the current language only.
+func (c *PageCollections) Pages() page.Pages {
+	return c.pages.get()
+}
 
-	pageIndex *cache.Lazy
+// RegularPages returns all the regular pages.
+// This is for the current language only.
+func (c *PageCollections) RegularPages() page.Pages {
+	return c.regularPages.get()
+}
+
+// AllPages returns all pages for all languages.
+func (c *PageCollections) AllPages() page.Pages {
+	return c.allPages.get()
+}
+
+// AllRegularPages returns all regular pages for all languages.
+func (c *PageCollections) AllRegularPages() page.Pages {
+	return c.allRegularPages.get()
 }
 
 // Get initializes the index if not already done so, then
 // looks up the given page ref, returns nil if no value found.
-func (c *PageCollections) getFromCache(ref string) (*Page, error) {
+func (c *PageCollections) getFromCache(ref string) (page.Page, error) {
 	v, found, err := c.pageIndex.Get(ref)
 	if err != nil {
 		return nil, err
@@ -63,7 +85,7 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) {
 		return nil, nil
 	}
 
-	p := v.(*Page)
+	p := v.(page.Page)
 
 	if p != ambiguityFlag {
 		return p, nil
@@ -71,17 +93,49 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) {
 	return nil, fmt.Errorf("page reference %q is ambiguous", ref)
 }
 
-var ambiguityFlag = &Page{Kind: kindUnknown, title: "ambiguity flag"}
+type lazyPagesFactory struct {
+	pages page.Pages
 
-func (c *PageCollections) refreshPageCaches() {
-	c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
-	c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
-	c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
+	init    sync.Once
+	factory page.PagesFactory
+}
 
-	indexLoader := func() (map[string]interface{}, error) {
+func (l *lazyPagesFactory) get() page.Pages {
+	l.init.Do(func() {
+		l.pages = l.factory()
+	})
+	return l.pages
+}
+
+func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory {
+	return &lazyPagesFactory{factory: factory}
+}
+
+func newPageCollections() *PageCollections {
+	return newPageCollectionsFromPages(nil)
+}
+
+func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
+
+	c := &PageCollections{rawAllPages: pages}
+
+	c.pages = newLazyPagesFactory(func() page.Pages {
+		pages := make(page.Pages, len(c.workAllPages))
+		for i, p := range c.workAllPages {
+			pages[i] = p
+		}
+		return pages
+	})
+
+	c.regularPages = newLazyPagesFactory(func() page.Pages {
+		return c.findPagesByKindInWorkPages(page.KindPage, c.workAllPages)
+	})
+
+	c.pageIndex = cache.NewLazy(func() (map[string]interface{}, error) {
 		index := make(map[string]interface{})
 
-		add := func(ref string, p *Page) {
+		add := func(ref string, p page.Page) {
+			ref = strings.ToLower(ref)
 			existing := index[ref]
 			if existing == nil {
 				index[ref] = p
@@ -90,71 +144,63 @@ func (c *PageCollections) refreshPageCaches() {
 			}
 		}
 
-		for _, pageCollection := range []Pages{c.RegularPages, c.headlessPages} {
+		for _, pageCollection := range []pageStatePages{c.workAllPages, c.headlessPages} {
 			for _, p := range pageCollection {
-				sourceRef := p.absoluteSourceRef()
+				if p.IsPage() {
+					sourceRef := p.sourceRef()
 
-				if sourceRef != "" {
-					// index the canonical ref
-					// e.g. /section/article.md
-					add(sourceRef, p)
-				}
+					if sourceRef != "" {
+						// index the canonical ref
+						// e.g. /section/article.md
+						add(sourceRef, p)
+					}
+
+					// Ref/Relref supports this potentially ambiguous lookup.
+					add(p.File().LogicalName(), p)
 
-				// Ref/Relref supports this potentially ambiguous lookup.
-				add(p.LogicalName(), p)
+					translationBaseName := p.File().TranslationBaseName()
 
-				translationBaseName := p.TranslationBaseName()
+					dir, _ := path.Split(sourceRef)
+					dir = strings.TrimSuffix(dir, "/")
 
-				dir, _ := path.Split(sourceRef)
-				dir = strings.TrimSuffix(dir, "/")
+					if translationBaseName == "index" {
+						add(dir, p)
+						add(path.Base(dir), p)
+					} else {
+						add(translationBaseName, p)
+					}
 
-				if translationBaseName == "index" {
-					add(dir, p)
-					add(path.Base(dir), p)
+					// We need a way to get to the current language version.
+					pathWithNoExtensions := path.Join(dir, translationBaseName)
+					add(pathWithNoExtensions, p)
 				} else {
-					add(translationBaseName, p)
+					// index the canonical, unambiguous ref for any backing file
+					// e.g. /section/_index.md
+					sourceRef := p.sourceRef()
+					if sourceRef != "" {
+						add(sourceRef, p)
+					}
+
+					ref := p.SectionsPath()
+
+					// index the canonical, unambiguous virtual ref
+					// e.g. /section
+					// (this may already have been indexed above)
+					add("/"+ref, p)
 				}
-
-				// We need a way to get to the current language version.
-				pathWithNoExtensions := path.Join(dir, translationBaseName)
-				add(pathWithNoExtensions, p)
-			}
-		}
-
-		for _, p := range c.indexPages {
-			// index the canonical, unambiguous ref for any backing file
-			// e.g. /section/_index.md
-			sourceRef := p.absoluteSourceRef()
-			if sourceRef != "" {
-				add(sourceRef, p)
 			}
-
-			ref := path.Join(p.sections...)
-
-			// index the canonical, unambiguous virtual ref
-			// e.g. /section
-			// (this may already have been indexed above)
-			add("/"+ref, p)
 		}
 
 		return index, nil
-	}
+	})
 
-	c.pageIndex = cache.NewLazy(indexLoader)
-}
-
-func newPageCollections() *PageCollections {
-	return &PageCollections{}
-}
-
-func newPageCollectionsFromPages(pages Pages) *PageCollections {
-	return &PageCollections{rawAllPages: pages}
+	return c
 }
 
 // This is an adapter func for the old API with Kind as first argument.
 // This is invoked when you do .Site.GetPage. We drop the Kind and fails
 // if there are more than 2 arguments, which would be ambigous.
-func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) {
+func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
 	var refs []string
 	for _, r := range ref {
 		// A common construct in the wild is
@@ -173,10 +219,10 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) {
 		return nil, fmt.Errorf(`too many arguments to .Site.GetPage: %v. Use lookups on the form {{ .Site.GetPage "/posts/mypage-md" }}`, ref)
 	}
 
-	if len(refs) == 0 || refs[0] == KindHome {
+	if len(refs) == 0 || refs[0] == page.KindHome {
 		key = "/"
 	} else if len(refs) == 1 {
-		if len(ref) == 2 && refs[0] == KindSection {
+		if len(ref) == 2 && refs[0] == page.KindSection {
 			// This is an old style reference to the "Home Page section".
 			// Typically fetched via {{ .Site.GetPage "section" .Section }}
 			// See https://github.com/gohugoio/hugo/issues/4989
@@ -197,17 +243,18 @@ func (c *PageCollections) getPageOldVersion(ref ...string) (*Page, error) {
 }
 
 // 	Only used in tests.
-func (c *PageCollections) getPage(typ string, sections ...string) *Page {
+func (c *PageCollections) getPage(typ string, sections ...string) page.Page {
 	refs := append([]string{typ}, path.Join(sections...))
 	p, _ := c.getPageOldVersion(refs...)
 	return p
 }
 
-// Ref is either unix-style paths (i.e. callers responsible for
-// calling filepath.ToSlash as necessary) or shorthand refs.
-func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
+// Case insensitive page lookup.
+func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) {
 	var anError error
 
+	ref = strings.ToLower(ref)
+
 	// Absolute (content root relative) reference.
 	if strings.HasPrefix(ref, "/") {
 		p, err := c.getFromCache(ref)
@@ -220,7 +267,7 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
 
 	} else if context != nil {
 		// Try the page-relative path.
-		ppath := path.Join("/", strings.Join(context.sections, "/"), ref)
+		ppath := path.Join("/", strings.ToLower(context.SectionsPath()), ref)
 		p, err := c.getFromCache(ppath)
 		if err == nil && p != nil {
 			return p, nil
@@ -236,7 +283,8 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
 		if err == nil && p != nil {
 			if context != nil {
 				// TODO(bep) remove this case and the message below when the storm has passed
-				helpers.DistinctFeedbackLog.Printf(`WARNING: make non-relative ref/relref page reference(s) in page %q absolute, e.g. {{< ref "/blog/my-post.md" >}}`, context.absoluteSourceRef())
+				err := wrapErr(errors.New(`make non-relative ref/relref page reference(s) in page %q absolute, e.g. {{< ref "/blog/my-post.md" >}}`), context)
+				helpers.DistinctWarnLog.Println(err)
 			}
 			return p, nil
 		}
@@ -253,49 +301,56 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) {
 	}
 
 	if p == nil && anError != nil {
-		if context != nil {
-			return nil, fmt.Errorf("failed to resolve path from page %q: %s", context.absoluteSourceRef(), anError)
-		}
-		return nil, fmt.Errorf("failed to resolve page: %s", anError)
+		return nil, wrapErr(errors.Wrap(anError, "failed to resolve ref"), context)
 	}
 
 	return p, nil
 }
 
-func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages {
-	var pages Pages
+func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
+	var pages page.Pages
 	for _, p := range inPages {
-		if p.Kind == kind {
+		if p.Kind() == kind {
 			pages = append(pages, p)
 		}
 	}
 	return pages
 }
 
-func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page {
-	for _, p := range inPages {
-		if p.Kind == kind {
-			return p
+func (c *PageCollections) findPagesByKind(kind string) page.Pages {
+	return c.findPagesByKindIn(kind, c.Pages())
+}
+
+func (c *PageCollections) findWorkPagesByKind(kind string) pageStatePages {
+	var pages pageStatePages
+	for _, p := range c.workAllPages {
+		if p.Kind() == kind {
+			pages = append(pages, p)
 		}
 	}
-	return nil
+	return pages
 }
 
-func (*PageCollections) findPagesByKindNotIn(kind string, inPages Pages) Pages {
-	var pages Pages
+func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStatePages) page.Pages {
+	var pages page.Pages
 	for _, p := range inPages {
-		if p.Kind != kind {
+		if p.Kind() == kind {
 			pages = append(pages, p)
 		}
 	}
 	return pages
 }
 
-func (c *PageCollections) findPagesByKind(kind string) Pages {
-	return c.findPagesByKindIn(kind, c.Pages)
+func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
+	for _, p := range c.workAllPages {
+		if p.Kind() == kind {
+			return p
+		}
+	}
+	return nil
 }
 
-func (c *PageCollections) addPage(page *Page) {
+func (c *PageCollections) addPage(page *pageState) {
 	c.rawAllPages = append(c.rawAllPages, page)
 }
 
@@ -307,35 +362,31 @@ func (c *PageCollections) removePageFilename(filename string) {
 
 }
 
-func (c *PageCollections) removePage(page *Page) {
+func (c *PageCollections) removePage(page *pageState) {
 	if i := c.rawAllPages.findPagePos(page); i >= 0 {
 		c.clearResourceCacheForPage(c.rawAllPages[i])
 		c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
 	}
-
 }
 
-func (c *PageCollections) findPagesByShortcode(shortcode string) Pages {
-	var pages Pages
-
+func (c *PageCollections) findPagesByShortcode(shortcode string) page.Pages {
+	var pages page.Pages
 	for _, p := range c.rawAllPages {
-		if p.shortcodeState != nil {
-			if _, ok := p.shortcodeState.nameSet[shortcode]; ok {
-				pages = append(pages, p)
-			}
+		if p.HasShortcode(shortcode) {
+			pages = append(pages, p)
 		}
 	}
 	return pages
 }
 
-func (c *PageCollections) replacePage(page *Page) {
+func (c *PageCollections) replacePage(page *pageState) {
 	// will find existing page that matches filepath and remove it
 	c.removePage(page)
 	c.addPage(page)
 }
 
-func (c *PageCollections) clearResourceCacheForPage(page *Page) {
-	if len(page.Resources) > 0 {
-		page.s.ResourceSpec.DeleteCacheByPrefix(page.relTargetPathBase)
+func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
+	if len(page.resources) > 0 {
+		page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
 	}
 }
diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go
index 2f8b3149044..a5a347f83cd 100644
--- a/hugolib/pagecollections_test.go
+++ b/hugolib/pagecollections_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -21,6 +21,8 @@ import (
 	"testing"
 	"time"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/gohugoio/hugo/deps"
 	"github.com/stretchr/testify/require"
 )
@@ -98,12 +100,12 @@ func BenchmarkGetPageRegular(b *testing.B) {
 
 type testCase struct {
 	kind          string
-	context       *Page
+	context       page.Page
 	path          []string
 	expectedTitle string
 }
 
-func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.Assertions) {
+func (t *testCase) check(p page.Page, err error, errorMsg string, assert *require.Assertions) {
 	switch t.kind {
 	case "Ambiguous":
 		assert.Error(err)
@@ -114,8 +116,8 @@ func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.As
 	default:
 		assert.NoError(err, errorMsg)
 		assert.NotNil(p, errorMsg)
-		assert.Equal(t.kind, p.Kind, errorMsg)
-		assert.Equal(t.expectedTitle, p.title, errorMsg)
+		assert.Equal(t.kind, p.Kind(), errorMsg)
+		assert.Equal(t.expectedTitle, p.Title(), errorMsg)
 	}
 }
 
@@ -159,62 +161,62 @@ func TestGetPage(t *testing.T) {
 
 	tests := []testCase{
 		// legacy content root relative paths
-		{KindHome, nil, []string{}, "home page"},
-		{KindPage, nil, []string{"about.md"}, "about page"},
-		{KindSection, nil, []string{"sect3"}, "section 3"},
-		{KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
-		{KindPage, nil, []string{"sect4/page2.md"}, "Title4_2"},
-		{KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
-		{KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
-		{KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"}, //test OS-specific path
+		{page.KindHome, nil, []string{}, "home page"},
+		{page.KindPage, nil, []string{"about.md"}, "about page"},
+		{page.KindSection, nil, []string{"sect3"}, "section 3"},
+		{page.KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
+		{page.KindPage, nil, []string{"sect4/page2.md"}, "Title4_2"},
+		{page.KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
+		{page.KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
+		{page.KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"}, //test OS-specific path
 
 		// shorthand refs (potentially ambiguous)
-		{KindPage, nil, []string{"unique.md"}, "UniqueBase"},
+		{page.KindPage, nil, []string{"unique.md"}, "UniqueBase"},
 		{"Ambiguous", nil, []string{"page1.md"}, ""},
 
 		// ISSUE: This is an ambiguous ref, but because we have to support the legacy
 		// content root relative paths without a leading slash, the lookup
 		// returns /sect7. This undermines ambiguity detection, but we have no choice.
 		//{"Ambiguous", nil, []string{"sect7"}, ""},
-		{KindSection, nil, []string{"sect7"}, "Sect7s"},
+		{page.KindSection, nil, []string{"sect7"}, "Sect7s"},
 
 		// absolute paths
-		{KindHome, nil, []string{"/"}, "home page"},
-		{KindPage, nil, []string{"/about.md"}, "about page"},
-		{KindSection, nil, []string{"/sect3"}, "section 3"},
-		{KindPage, nil, []string{"/sect3/page1.md"}, "Title3_1"},
-		{KindPage, nil, []string{"/sect4/page2.md"}, "Title4_2"},
-		{KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
-		{KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
-		{KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path
-		{KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"},                  //next test depends on this page existing
+		{page.KindHome, nil, []string{"/"}, "home page"},
+		{page.KindPage, nil, []string{"/about.md"}, "about page"},
+		{page.KindSection, nil, []string{"/sect3"}, "section 3"},
+		{page.KindPage, nil, []string{"/sect3/page1.md"}, "Title3_1"},
+		{page.KindPage, nil, []string{"/sect4/page2.md"}, "Title4_2"},
+		{page.KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
+		{page.KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
+		{page.KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path
+		{page.KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"},                  //next test depends on this page existing
 		// {"NoPage", nil, []string{"/unique.md"}, ""},  // ISSUE #4969: this is resolving to /sect3/unique.md
 		{"NoPage", nil, []string{"/missing-page.md"}, ""},
 		{"NoPage", nil, []string{"/missing-section"}, ""},
 
 		// relative paths
-		{KindHome, sec3, []string{".."}, "home page"},
-		{KindHome, sec3, []string{"../"}, "home page"},
-		{KindPage, sec3, []string{"../about.md"}, "about page"},
-		{KindSection, sec3, []string{"."}, "section 3"},
-		{KindSection, sec3, []string{"./"}, "section 3"},
-		{KindPage, sec3, []string{"page1.md"}, "Title3_1"},
-		{KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
-		{KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
-		{KindSection, sec3, []string{"sect7"}, "another sect7"},
-		{KindSection, sec3, []string{"./sect7"}, "another sect7"},
-		{KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
-		{KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
-		{KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path
-		{KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
+		{page.KindHome, sec3, []string{".."}, "home page"},
+		{page.KindHome, sec3, []string{"../"}, "home page"},
+		{page.KindPage, sec3, []string{"../about.md"}, "about page"},
+		{page.KindSection, sec3, []string{"."}, "section 3"},
+		{page.KindSection, sec3, []string{"./"}, "section 3"},
+		{page.KindPage, sec3, []string{"page1.md"}, "Title3_1"},
+		{page.KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
+		{page.KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
+		{page.KindSection, sec3, []string{"sect7"}, "another sect7"},
+		{page.KindSection, sec3, []string{"./sect7"}, "another sect7"},
+		{page.KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
+		{page.KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
+		{page.KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path
+		{page.KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
 		{"NoPage", sec3, []string{"./sect2"}, ""},
 		//{"NoPage", sec3, []string{"sect2"}, ""}, // ISSUE: /sect3 page relative query is resolving to /sect2
 
 		// absolute paths ignore context
-		{KindHome, sec3, []string{"/"}, "home page"},
-		{KindPage, sec3, []string{"/about.md"}, "about page"},
-		{KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
-		{KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing
+		{page.KindHome, sec3, []string{"/"}, "home page"},
+		{page.KindPage, sec3, []string{"/about.md"}, "about page"},
+		{page.KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
+		{page.KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing
 		{"NoPage", sec3, []string{"/subsect/deep.md"}, ""},
 	}
 
diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go
index efcfbf04b34..bae2ddd810b 100644
--- a/hugolib/pages_language_merge_test.go
+++ b/hugolib/pages_language_merge_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -21,6 +21,8 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
+// TODO(bep) move and rewrite in resource/page.
+
 func TestMergeLanguages(t *testing.T) {
 	t.Parallel()
 	assert := require.New(t)
@@ -36,12 +38,12 @@ func TestMergeLanguages(t *testing.T) {
 	frSite := h.Sites[1]
 	nnSite := h.Sites[2]
 
-	assert.Equal(31, len(enSite.RegularPages))
-	assert.Equal(6, len(frSite.RegularPages))
-	assert.Equal(12, len(nnSite.RegularPages))
+	assert.Equal(31, len(enSite.RegularPages()))
+	assert.Equal(6, len(frSite.RegularPages()))
+	assert.Equal(12, len(nnSite.RegularPages()))
 
 	for i := 0; i < 2; i++ {
-		mergedNN := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+		mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
 		assert.Equal(31, len(mergedNN))
 		for i := 1; i <= 31; i++ {
 			expectedLang := "en"
@@ -49,11 +51,11 @@ func TestMergeLanguages(t *testing.T) {
 				expectedLang = "nn"
 			}
 			p := mergedNN[i-1]
-			assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i))
+			assert.Equal(expectedLang, p.Language().Lang, fmt.Sprintf("Test %d", i))
 		}
 	}
 
-	mergedFR := frSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+	mergedFR := frSite.RegularPages().MergeByLanguage(enSite.RegularPages())
 	assert.Equal(31, len(mergedFR))
 	for i := 1; i <= 31; i++ {
 		expectedLang := "en"
@@ -61,28 +63,28 @@ func TestMergeLanguages(t *testing.T) {
 			expectedLang = "fr"
 		}
 		p := mergedFR[i-1]
-		assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i))
+		assert.Equal(expectedLang, p.Language().Lang, fmt.Sprintf("Test %d", i))
 	}
 
-	firstNN := nnSite.RegularPages[0]
+	firstNN := nnSite.RegularPages()[0]
 	assert.Equal(4, len(firstNN.Sites()))
 	assert.Equal("en", firstNN.Sites().First().Language().Lang)
 
 	nnBundle := nnSite.getPage("page", "bundle")
 	enBundle := enSite.getPage("page", "bundle")
 
-	assert.Equal(6, len(enBundle.Resources))
-	assert.Equal(2, len(nnBundle.Resources))
+	assert.Equal(6, len(enBundle.Resources()))
+	assert.Equal(2, len(nnBundle.Resources()))
 
-	var ri interface{} = nnBundle.Resources
+	var ri interface{} = nnBundle.Resources()
 
 	// This looks less ugly in the templates ...
-	mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources)
+	mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources())
 	assert.Equal(6, len(mergedNNResources))
 
-	unchanged, err := nnSite.RegularPages.MergeByLanguageInterface(nil)
+	unchanged, err := nnSite.RegularPages().MergeByLanguageInterface(nil)
 	assert.NoError(err)
-	assert.Equal(nnSite.RegularPages, unchanged)
+	assert.Equal(nnSite.RegularPages(), unchanged)
 
 }
 
@@ -93,7 +95,7 @@ func TestMergeLanguagesTemplate(t *testing.T) {
 	b.WithTemplates("home.html", `
 {{ $pages := .Site.RegularPages }}
 {{ .Scratch.Set "pages" $pages }}
-{{ if eq .Lang "nn" }}:
+{{ if eq .Language.Lang "nn" }}:
 {{ $enSite := index .Sites 0 }}
 {{ $frSite := index .Sites 1 }}
 {{ $nnBundle := .Site.GetPage "page" "bundle" }}
@@ -103,8 +105,8 @@ func TestMergeLanguagesTemplate(t *testing.T) {
 {{ end }}
 {{ $pages := .Scratch.Get "pages" }}
 {{ $pages2 := .Scratch.Get "pages2" }}
-Pages1: {{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .Path }} {{ .Lang }} | {{ end }}
-Pages2: {{ range $i, $p := $pages2 }}{{ add $i 1 }}: {{ .Title }} {{ .Lang }} | {{ end }}
+Pages1: {{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .File.Path }} {{ .Language.Lang }} | {{ end }}
+Pages2: {{ range $i, $p := $pages2 }}{{ add $i 1 }}: {{ .Title }} {{ .Language.Lang }} | {{ end }}
 
 `,
 		"shortcodes/shortcode.html", "MyShort",
@@ -178,7 +180,7 @@ func BenchmarkMergeByLanguage(b *testing.B) {
 	nnSite := h.Sites[2]
 
 	for i := 0; i < b.N; i++ {
-		merged := nnSite.RegularPages.MergeByLanguage(enSite.RegularPages)
+		merged := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
 		if len(merged) != count {
 			b.Fatal("Count mismatch")
 		}
diff --git a/hugolib/pages_related_test.go b/hugolib/pages_related_test.go
deleted file mode 100644
index ed8d9df9d6d..00000000000
--- a/hugolib/pages_related_test.go
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"path/filepath"
-	"testing"
-
-	"github.com/gohugoio/hugo/common/types"
-	"github.com/gohugoio/hugo/deps"
-
-	"github.com/stretchr/testify/require"
-)
-
-func TestRelated(t *testing.T) {
-	assert := require.New(t)
-
-	t.Parallel()
-
-	var (
-		cfg, fs = newTestCfg()
-		//th      = testHelper{cfg, fs, t}
-	)
-
-	pageTmpl := `---
-title: Page %d
-keywords: [%s]
-date: %s
----
-
-Content
-`
-
-	writeSource(t, fs, filepath.Join("content", "page1.md"), fmt.Sprintf(pageTmpl, 1, "hugo, says", "2017-01-03"))
-	writeSource(t, fs, filepath.Join("content", "page2.md"), fmt.Sprintf(pageTmpl, 2, "hugo, rocks", "2017-01-02"))
-	writeSource(t, fs, filepath.Join("content", "page3.md"), fmt.Sprintf(pageTmpl, 3, "bep, says", "2017-01-01"))
-
-	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
-	assert.Len(s.RegularPages, 3)
-
-	result, err := s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
-
-	assert.NoError(err)
-	assert.Len(result, 2)
-	assert.Equal("Page 2", result[0].title)
-	assert.Equal("Page 1", result[1].title)
-
-	result, err = s.RegularPages.Related(s.RegularPages[0])
-	assert.Len(result, 2)
-	assert.Equal("Page 2", result[0].title)
-	assert.Equal("Page 3", result[1].title)
-
-	result, err = s.RegularPages.RelatedIndices(s.RegularPages[0], "keywords")
-	assert.Len(result, 2)
-	assert.Equal("Page 2", result[0].title)
-	assert.Equal("Page 3", result[1].title)
-
-	result, err = s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
-	assert.NoError(err)
-	assert.Len(result, 2)
-	assert.Equal("Page 2", result[0].title)
-	assert.Equal("Page 3", result[1].title)
-}
diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go
deleted file mode 100644
index 5dbef609bdc..00000000000
--- a/hugolib/pagination_test.go
+++ /dev/null
@@ -1,579 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"fmt"
-	"html/template"
-	"path/filepath"
-	"strings"
-	"testing"
-
-	"github.com/gohugoio/hugo/deps"
-	"github.com/gohugoio/hugo/output"
-	"github.com/stretchr/testify/require"
-)
-
-func TestSplitPages(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-
-	pages := createTestPages(s, 21)
-	chunks := splitPages(pages, 5)
-	require.Equal(t, 5, len(chunks))
-
-	for i := 0; i < 4; i++ {
-		require.Equal(t, 5, chunks[i].Len())
-	}
-
-	lastChunk := chunks[4]
-	require.Equal(t, 1, lastChunk.Len())
-
-}
-
-func TestSplitPageGroups(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	pages := createTestPages(s, 21)
-	groups, _ := pages.GroupBy("Weight", "desc")
-	chunks := splitPageGroups(groups, 5)
-	require.Equal(t, 5, len(chunks))
-
-	firstChunk := chunks[0]
-
-	// alternate weight 5 and 10
-	if groups, ok := firstChunk.(PagesGroup); ok {
-		require.Equal(t, 5, groups.Len())
-		for _, pg := range groups {
-			// first group 10 in weight
-			require.Equal(t, 10, pg.Key)
-			for _, p := range pg.Pages {
-				require.True(t, p.fuzzyWordCount%2 == 0) // magic test
-			}
-		}
-	} else {
-		t.Fatal("Excepted PageGroup")
-	}
-
-	lastChunk := chunks[4]
-
-	if groups, ok := lastChunk.(PagesGroup); ok {
-		require.Equal(t, 1, groups.Len())
-		for _, pg := range groups {
-			// last should have 5 in weight
-			require.Equal(t, 5, pg.Key)
-			for _, p := range pg.Pages {
-				require.True(t, p.fuzzyWordCount%2 != 0) // magic test
-			}
-		}
-	} else {
-		t.Fatal("Excepted PageGroup")
-	}
-
-}
-
-func TestPager(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	pages := createTestPages(s, 21)
-	groups, _ := pages.GroupBy("Weight", "desc")
-
-	urlFactory := func(page int) string {
-		return fmt.Sprintf("page/%d/", page)
-	}
-
-	_, err := newPaginatorFromPages(pages, -1, urlFactory)
-	require.NotNil(t, err)
-
-	_, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
-	require.NotNil(t, err)
-
-	pag, err := newPaginatorFromPages(pages, 5, urlFactory)
-	require.Nil(t, err)
-	doTestPages(t, pag)
-	first := pag.Pagers()[0].First()
-	require.Equal(t, "Pager 1", first.String())
-	require.NotEmpty(t, first.Pages())
-	require.Empty(t, first.PageGroups())
-
-	pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
-	require.Nil(t, err)
-	doTestPages(t, pag)
-	first = pag.Pagers()[0].First()
-	require.NotEmpty(t, first.PageGroups())
-	require.Empty(t, first.Pages())
-
-}
-
-func doTestPages(t *testing.T, paginator *paginator) {
-
-	paginatorPages := paginator.Pagers()
-
-	require.Equal(t, 5, len(paginatorPages))
-	require.Equal(t, 21, paginator.TotalNumberOfElements())
-	require.Equal(t, 5, paginator.PageSize())
-	require.Equal(t, 5, paginator.TotalPages())
-
-	first := paginatorPages[0]
-	require.Equal(t, template.HTML("page/1/"), first.URL())
-	require.Equal(t, first, first.First())
-	require.True(t, first.HasNext())
-	require.Equal(t, paginatorPages[1], first.Next())
-	require.False(t, first.HasPrev())
-	require.Nil(t, first.Prev())
-	require.Equal(t, 5, first.NumberOfElements())
-	require.Equal(t, 1, first.PageNumber())
-
-	third := paginatorPages[2]
-	require.True(t, third.HasNext())
-	require.True(t, third.HasPrev())
-	require.Equal(t, paginatorPages[1], third.Prev())
-
-	last := paginatorPages[4]
-	require.Equal(t, template.HTML("page/5/"), last.URL())
-	require.Equal(t, last, last.Last())
-	require.False(t, last.HasNext())
-	require.Nil(t, last.Next())
-	require.True(t, last.HasPrev())
-	require.Equal(t, 1, last.NumberOfElements())
-	require.Equal(t, 5, last.PageNumber())
-}
-
-func TestPagerNoPages(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	pages := createTestPages(s, 0)
-	groups, _ := pages.GroupBy("Weight", "desc")
-
-	urlFactory := func(page int) string {
-		return fmt.Sprintf("page/%d/", page)
-	}
-
-	paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
-	doTestPagerNoPages(t, paginator)
-
-	first := paginator.Pagers()[0].First()
-	require.Empty(t, first.PageGroups())
-	require.Empty(t, first.Pages())
-
-	paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
-	doTestPagerNoPages(t, paginator)
-
-	first = paginator.Pagers()[0].First()
-	require.Empty(t, first.PageGroups())
-	require.Empty(t, first.Pages())
-
-}
-
-func doTestPagerNoPages(t *testing.T, paginator *paginator) {
-	paginatorPages := paginator.Pagers()
-
-	require.Equal(t, 1, len(paginatorPages))
-	require.Equal(t, 0, paginator.TotalNumberOfElements())
-	require.Equal(t, 5, paginator.PageSize())
-	require.Equal(t, 0, paginator.TotalPages())
-
-	// pageOne should be nothing but the first
-	pageOne := paginatorPages[0]
-	require.NotNil(t, pageOne.First())
-	require.False(t, pageOne.HasNext())
-	require.False(t, pageOne.HasPrev())
-	require.Nil(t, pageOne.Next())
-	require.Equal(t, 1, len(pageOne.Pagers()))
-	require.Equal(t, 0, pageOne.Pages().Len())
-	require.Equal(t, 0, pageOne.NumberOfElements())
-	require.Equal(t, 0, pageOne.TotalNumberOfElements())
-	require.Equal(t, 0, pageOne.TotalPages())
-	require.Equal(t, 1, pageOne.PageNumber())
-	require.Equal(t, 5, pageOne.PageSize())
-
-}
-
-func TestPaginationURLFactory(t *testing.T) {
-	t.Parallel()
-	cfg, fs := newTestCfg()
-	cfg.Set("paginatePath", "zoo")
-
-	for _, uglyURLs := range []bool{false, true} {
-		for _, canonifyURLs := range []bool{false, true} {
-			t.Run(fmt.Sprintf("uglyURLs=%t,canonifyURLs=%t", uglyURLs, canonifyURLs), func(t *testing.T) {
-
-				tests := []struct {
-					name     string
-					d        targetPathDescriptor
-					baseURL  string
-					page     int
-					expected string
-				}{
-					{"HTML home page 32",
-						targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/", 32, "/zoo/32/"},
-					{"JSON home page 42",
-						targetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "http://example.com/", 42, "/zoo/42/"},
-					// Issue #1252
-					{"BaseURL with sub path",
-						targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/sub/", 999, "/sub/zoo/999/"},
-				}
-
-				for _, test := range tests {
-					d := test.d
-					cfg.Set("baseURL", test.baseURL)
-					cfg.Set("canonifyURLs", canonifyURLs)
-					cfg.Set("uglyURLs", uglyURLs)
-					d.UglyURLs = uglyURLs
-
-					expected := test.expected
-
-					if canonifyURLs {
-						expected = strings.Replace(expected, "/sub", "", 1)
-					}
-
-					if uglyURLs {
-						expected = expected[:len(expected)-1] + "." + test.d.Type.MediaType.Suffix()
-					}
-
-					pathSpec := newTestPathSpec(fs, cfg)
-					d.PathSpec = pathSpec
-
-					factory := newPaginationURLFactory(d)
-
-					got := factory(test.page)
-
-					require.Equal(t, expected, got)
-
-				}
-			})
-		}
-	}
-}
-
-func TestPaginator(t *testing.T) {
-	t.Parallel()
-	for _, useViper := range []bool{false, true} {
-		doTestPaginator(t, useViper)
-	}
-}
-
-func doTestPaginator(t *testing.T, useViper bool) {
-
-	cfg, fs := newTestCfg()
-
-	pagerSize := 5
-	if useViper {
-		cfg.Set("paginate", pagerSize)
-	} else {
-		cfg.Set("paginate", -1)
-	}
-
-	s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
-	require.NoError(t, err)
-
-	pages := createTestPages(s, 12)
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	n1.data["Pages"] = pages
-
-	var paginator1 *Pager
-
-	if useViper {
-		paginator1, err = n1.Paginator()
-	} else {
-		paginator1, err = n1.Paginator(pagerSize)
-	}
-
-	require.Nil(t, err)
-	require.NotNil(t, paginator1)
-	require.Equal(t, 3, paginator1.TotalPages())
-	require.Equal(t, 12, paginator1.TotalNumberOfElements())
-
-	n2.paginator = paginator1.Next()
-	paginator2, err := n2.Paginator()
-	require.Nil(t, err)
-	require.Equal(t, paginator2, paginator1.Next())
-
-	n1.data["Pages"] = createTestPages(s, 1)
-	samePaginator, _ := n1.Paginator()
-	require.Equal(t, paginator1, samePaginator)
-
-	pp, _ := s.NewPage("test")
-	p, _ := newPageOutput(pp, false, false, output.HTMLFormat)
-
-	_, err = p.Paginator()
-	require.NotNil(t, err)
-}
-
-func TestPaginatorWithNegativePaginate(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t, "paginate", -1)
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	_, err := n1.Paginator()
-	require.Error(t, err)
-}
-
-func TestPaginate(t *testing.T) {
-	t.Parallel()
-	for _, useViper := range []bool{false, true} {
-		doTestPaginate(t, useViper)
-	}
-}
-
-func TestPaginatorURL(t *testing.T) {
-	t.Parallel()
-	cfg, fs := newTestCfg()
-
-	cfg.Set("paginate", 2)
-	cfg.Set("paginatePath", "testing")
-
-	for i := 0; i < 10; i++ {
-		// Issue #2177, do not double encode URLs
-		writeSource(t, fs, filepath.Join("content", "阅读", fmt.Sprintf("page%d.md", (i+1))),
-			fmt.Sprintf(`---
-title: Page%d
----
-Conten%d
-`, (i+1), i+1))
-
-	}
-	writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html><body>{{.Content}}</body></html>")
-	writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
-		`
-<html><body>
-Count: {{ .Paginator.TotalNumberOfElements }}
-Pages: {{ .Paginator.TotalPages }}
-{{ range .Paginator.Pagers -}}
- {{ .PageNumber }}: {{ .URL }} 
-{{ end }}
-</body></html>`)
-
-	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
-
-	th := testHelper{s.Cfg, s.Fs, t}
-
-	th.assertFileContent(filepath.Join("public", "阅读", "testing", "2", "index.html"), "2: /%E9%98%85%E8%AF%BB/testing/2/")
-
-}
-
-func doTestPaginate(t *testing.T, useViper bool) {
-	pagerSize := 5
-
-	var (
-		s   *Site
-		err error
-	)
-
-	if useViper {
-		s = newTestSite(t, "paginate", pagerSize)
-	} else {
-		s = newTestSite(t, "paginate", -1)
-	}
-
-	pages := createTestPages(s, 6)
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-
-	var paginator1, paginator2 *Pager
-
-	if useViper {
-		paginator1, err = n1.Paginate(pages)
-	} else {
-		paginator1, err = n1.Paginate(pages, pagerSize)
-	}
-
-	require.Nil(t, err)
-	require.NotNil(t, paginator1)
-	require.Equal(t, 2, paginator1.TotalPages())
-	require.Equal(t, 6, paginator1.TotalNumberOfElements())
-
-	n2.paginator = paginator1.Next()
-	if useViper {
-		paginator2, err = n2.Paginate(pages)
-	} else {
-		paginator2, err = n2.Paginate(pages, pagerSize)
-	}
-	require.Nil(t, err)
-	require.Equal(t, paginator2, paginator1.Next())
-
-	pp, err := s.NewPage("test")
-	p, _ := newPageOutput(pp, false, false, output.HTMLFormat)
-
-	_, err = p.Paginate(pages)
-	require.NotNil(t, err)
-}
-
-func TestInvalidOptions(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-
-	_, err := n1.Paginate(createTestPages(s, 1), 1, 2)
-	require.NotNil(t, err)
-	_, err = n1.Paginator(1, 2)
-	require.NotNil(t, err)
-	_, err = n1.Paginator(-1)
-	require.NotNil(t, err)
-}
-
-func TestPaginateWithNegativePaginate(t *testing.T) {
-	t.Parallel()
-	cfg, fs := newTestCfg()
-	cfg.Set("paginate", -1)
-
-	s, err := NewSiteForCfg(deps.DepsCfg{Cfg: cfg, Fs: fs})
-	require.NoError(t, err)
-
-	n, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-
-	_, err = n.Paginate(createTestPages(s, 2))
-	require.NotNil(t, err)
-}
-
-func TestPaginatePages(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-
-	groups, _ := createTestPages(s, 31).GroupBy("Weight", "desc")
-	pd := targetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat, PathSpec: s.PathSpec, Addends: "t"}
-
-	for i, seq := range []interface{}{createTestPages(s, 11), groups, WeightedPages{}, PageGroup{}, &Pages{}} {
-		v, err := paginatePages(pd, seq, 11)
-		require.NotNil(t, v, "Val %d", i)
-		require.Nil(t, err, "Err %d", i)
-	}
-	_, err := paginatePages(pd, Site{}, 11)
-	require.NotNil(t, err)
-
-}
-
-// Issue #993
-func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t, "paginate", 10)
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-
-	_, err := n1.Paginator()
-	require.Nil(t, err)
-	_, err = n1.Paginate(createTestPages(s, 2))
-	require.NotNil(t, err)
-
-	_, err = n2.Paginate(createTestPages(s, 2))
-	require.Nil(t, err)
-
-}
-
-func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t, "paginate", 10)
-
-	n1, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-	n2, _ := newPageOutput(s.newHomePage(), false, false, output.HTMLFormat)
-
-	p1 := createTestPages(s, 2)
-	p2 := createTestPages(s, 10)
-
-	_, err := n1.Paginate(p1)
-	require.Nil(t, err)
-
-	_, err = n1.Paginate(p1)
-	require.Nil(t, err)
-
-	_, err = n1.Paginate(p2)
-	require.NotNil(t, err)
-
-	_, err = n2.Paginate(p2)
-	require.Nil(t, err)
-}
-
-func TestProbablyEqualPageLists(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	fivePages := createTestPages(s, 5)
-	zeroPages := createTestPages(s, 0)
-	zeroPagesByWeight, _ := createTestPages(s, 0).GroupBy("Weight", "asc")
-	fivePagesByWeight, _ := createTestPages(s, 5).GroupBy("Weight", "asc")
-	ninePagesByWeight, _ := createTestPages(s, 9).GroupBy("Weight", "asc")
-
-	for i, this := range []struct {
-		v1     interface{}
-		v2     interface{}
-		expect bool
-	}{
-		{nil, nil, true},
-		{"a", "b", true},
-		{"a", fivePages, false},
-		{fivePages, "a", false},
-		{fivePages, createTestPages(s, 2), false},
-		{fivePages, fivePages, true},
-		{zeroPages, zeroPages, true},
-		{fivePagesByWeight, fivePagesByWeight, true},
-		{zeroPagesByWeight, fivePagesByWeight, false},
-		{zeroPagesByWeight, zeroPagesByWeight, true},
-		{fivePagesByWeight, fivePages, false},
-		{fivePagesByWeight, ninePagesByWeight, false},
-	} {
-		result := probablyEqualPageLists(this.v1, this.v2)
-
-		if result != this.expect {
-			t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
-
-		}
-	}
-}
-
-func TestPage(t *testing.T) {
-	t.Parallel()
-	urlFactory := func(page int) string {
-		return fmt.Sprintf("page/%d/", page)
-	}
-
-	s := newTestSite(t)
-
-	fivePages := createTestPages(s, 7)
-	fivePagesFuzzyWordCount, _ := createTestPages(s, 7).GroupBy("FuzzyWordCount", "asc")
-
-	p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
-	p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
-
-	f1 := p1.pagers[0].First()
-	f2 := p2.pagers[0].First()
-
-	page11, _ := f1.page(1)
-	page1Nil, _ := f1.page(3)
-
-	page21, _ := f2.page(1)
-	page2Nil, _ := f2.page(3)
-
-	require.Equal(t, 3, page11.fuzzyWordCount)
-	require.Nil(t, page1Nil)
-
-	require.Equal(t, 3, page21.fuzzyWordCount)
-	require.Nil(t, page2Nil)
-}
-
-func createTestPages(s *Site, num int) Pages {
-	pages := make(Pages, num)
-
-	for i := 0; i < num; i++ {
-		p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/z/p%d.md", i)))
-		w := 5
-		if i%2 == 0 {
-			w = 10
-		}
-		p.fuzzyWordCount = i + 2
-		p.Weight = w
-		pages[i] = p
-
-	}
-
-	return pages
-}
diff --git a/hugolib/paths/themes.go b/hugolib/paths/themes.go
index 4718720e1d0..a526953f16e 100644
--- a/hugolib/paths/themes.go
+++ b/hugolib/paths/themes.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -75,7 +75,7 @@ func (c *themesCollector) add(name, configFilename string) (ThemeConfig, error)
 		var err error
 		cfg, err = config.FromFile(c.fs, configFilename)
 		if err != nil {
-			return tc, nil
+			return tc, err
 		}
 	}
 
diff --git a/hugolib/permalinker.go b/hugolib/permalinker.go
index 5e7a13a0252..29dad6ce4e4 100644
--- a/hugolib/permalinker.go
+++ b/hugolib/permalinker.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,8 +14,7 @@
 package hugolib
 
 var (
-	_ Permalinker = (*Page)(nil)
-	_ Permalinker = (*OutputFormat)(nil)
+	_ Permalinker = (*pageState)(nil)
 )
 
 // Permalinker provides permalinks of both the relative and absolute kind.
diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go
deleted file mode 100644
index 3d261a113e3..00000000000
--- a/hugolib/permalinks.go
+++ /dev/null
@@ -1,213 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"errors"
-	"fmt"
-	"path"
-	"path/filepath"
-	"regexp"
-	"strconv"
-	"strings"
-
-	"github.com/gohugoio/hugo/helpers"
-)
-
-// pathPattern represents a string which builds up a URL from attributes
-type pathPattern string
-
-// pageToPermaAttribute is the type of a function which, given a page and a tag
-// can return a string to go in that position in the page (or an error)
-type pageToPermaAttribute func(*Page, string) (string, error)
-
-// PermalinkOverrides maps a section name to a PathPattern
-type PermalinkOverrides map[string]pathPattern
-
-// knownPermalinkAttributes maps :tags in a permalink specification to a
-// function which, given a page and the tag, returns the resulting string
-// to be used to replace that tag.
-var knownPermalinkAttributes map[string]pageToPermaAttribute
-
-var attributeRegexp = regexp.MustCompile(`:\w+`)
-
-// validate determines if a PathPattern is well-formed
-func (pp pathPattern) validate() bool {
-	fragments := strings.Split(string(pp[1:]), "/")
-	var bail = false
-	for i := range fragments {
-		if bail {
-			return false
-		}
-		if len(fragments[i]) == 0 {
-			bail = true
-			continue
-		}
-
-		matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
-		if matches == nil {
-			continue
-		}
-
-		for _, match := range matches {
-			k := strings.ToLower(match[0][1:])
-			if _, ok := knownPermalinkAttributes[k]; !ok {
-				return false
-			}
-		}
-	}
-	return true
-}
-
-type permalinkExpandError struct {
-	pattern pathPattern
-	section string
-	err     error
-}
-
-func (pee *permalinkExpandError) Error() string {
-	return fmt.Sprintf("error expanding %q section %q: %s", string(pee.pattern), pee.section, pee.err)
-}
-
-var (
-	errPermalinkIllFormed        = errors.New("permalink ill-formed")
-	errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
-)
-
-// Expand on a PathPattern takes a Page and returns the fully expanded Permalink
-// or an error explaining the failure.
-func (pp pathPattern) Expand(p *Page) (string, error) {
-	if !pp.validate() {
-		return "", &permalinkExpandError{pattern: pp, section: "<all>", err: errPermalinkIllFormed}
-	}
-	sections := strings.Split(string(pp), "/")
-	for i, field := range sections {
-		if len(field) == 0 {
-			continue
-		}
-
-		matches := attributeRegexp.FindAllStringSubmatch(field, -1)
-
-		if matches == nil {
-			continue
-		}
-
-		newField := field
-
-		for _, match := range matches {
-			attr := match[0][1:]
-			callback, ok := knownPermalinkAttributes[attr]
-
-			if !ok {
-				return "", &permalinkExpandError{pattern: pp, section: strconv.Itoa(i), err: errPermalinkAttributeUnknown}
-			}
-
-			newAttr, err := callback(p, attr)
-
-			if err != nil {
-				return "", &permalinkExpandError{pattern: pp, section: strconv.Itoa(i), err: err}
-			}
-
-			newField = strings.Replace(newField, match[0], newAttr, 1)
-		}
-
-		sections[i] = newField
-	}
-	return strings.Join(sections, "/"), nil
-}
-
-func pageToPermalinkDate(p *Page, dateField string) (string, error) {
-	// a Page contains a Node which provides a field Date, time.Time
-	switch dateField {
-	case "year":
-		return strconv.Itoa(p.Date.Year()), nil
-	case "month":
-		return fmt.Sprintf("%02d", int(p.Date.Month())), nil
-	case "monthname":
-		return p.Date.Month().String(), nil
-	case "day":
-		return fmt.Sprintf("%02d", p.Date.Day()), nil
-	case "weekday":
-		return strconv.Itoa(int(p.Date.Weekday())), nil
-	case "weekdayname":
-		return p.Date.Weekday().String(), nil
-	case "yearday":
-		return strconv.Itoa(p.Date.YearDay()), nil
-	}
-	//TODO: support classic strftime escapes too
-	// (and pass those through despite not being in the map)
-	panic("coding error: should not be here")
-}
-
-// pageToPermalinkTitle returns the URL-safe form of the title
-func pageToPermalinkTitle(p *Page, _ string) (string, error) {
-	// Page contains Node which has Title
-	// (also contains URLPath which has Slug, sometimes)
-	return p.s.PathSpec.URLize(p.title), nil
-}
-
-// pageToPermalinkFilename returns the URL-safe form of the filename
-func pageToPermalinkFilename(p *Page, _ string) (string, error) {
-	name := p.File.TranslationBaseName()
-	if name == "index" {
-		// Page bundles; the directory name will hopefully have a better name.
-		dir := strings.TrimSuffix(p.File.Dir(), helpers.FilePathSeparator)
-		_, name = filepath.Split(dir)
-	}
-
-	return p.s.PathSpec.URLize(name), nil
-}
-
-// if the page has a slug, return the slug, else return the title
-func pageToPermalinkSlugElseTitle(p *Page, a string) (string, error) {
-	if p.Slug != "" {
-		// Don't start or end with a -
-		// TODO(bep) this doesn't look good... Set the Slug once.
-		if strings.HasPrefix(p.Slug, "-") {
-			p.Slug = p.Slug[1:len(p.Slug)]
-		}
-
-		if strings.HasSuffix(p.Slug, "-") {
-			p.Slug = p.Slug[0 : len(p.Slug)-1]
-		}
-		return p.s.PathSpec.URLize(p.Slug), nil
-	}
-	return pageToPermalinkTitle(p, a)
-}
-
-func pageToPermalinkSection(p *Page, _ string) (string, error) {
-	return p.Section(), nil
-}
-
-func pageToPermalinkSections(p *Page, _ string) (string, error) {
-	return path.Join(p.CurrentSection().sections...), nil
-}
-
-func init() {
-	knownPermalinkAttributes = map[string]pageToPermaAttribute{
-		"year":        pageToPermalinkDate,
-		"month":       pageToPermalinkDate,
-		"monthname":   pageToPermalinkDate,
-		"day":         pageToPermalinkDate,
-		"weekday":     pageToPermalinkDate,
-		"weekdayname": pageToPermalinkDate,
-		"yearday":     pageToPermalinkDate,
-		"section":     pageToPermalinkSection,
-		"sections":    pageToPermalinkSections,
-		"title":       pageToPermalinkTitle,
-		"slug":        pageToPermalinkSlugElseTitle,
-		"filename":    pageToPermalinkFilename,
-	}
-
-}
diff --git a/hugolib/permalinks_test.go b/hugolib/permalinks_test.go
deleted file mode 100644
index 7bc24295584..00000000000
--- a/hugolib/permalinks_test.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"path/filepath"
-	"strings"
-	"testing"
-)
-
-// testdataPermalinks is used by a couple of tests; the expandsTo content is
-// subject to the data in simplePageJSON.
-var testdataPermalinks = []struct {
-	spec      string
-	valid     bool
-	expandsTo string
-}{
-	{":title", true, "spf13-vim-3.0-release-and-new-website"},
-	{"/:year-:month-:title", true, "/2012-04-spf13-vim-3.0-release-and-new-website"},
-
-	{"/:year/:yearday/:month/:monthname/:day/:weekday/:weekdayname/", true, "/2012/97/04/April/06/5/Friday/"}, // Dates
-	{"/:section/", true, "/blue/"},                                // Section
-	{"/:title/", true, "/spf13-vim-3.0-release-and-new-website/"}, // Title
-	{"/:slug/", true, "/spf13-vim-3-0-release-and-new-website/"},  // Slug
-	{"/:filename/", true, "/test-page/"},                          // Filename
-	// TODO(moorereason): need test scaffolding for this.
-	//{"/:sections/", false, "/blue/"},                              // Sections
-
-	// Failures
-	{"/blog/:fred", false, ""},
-	{"/:year//:title", false, ""},
-}
-
-func TestPermalinkValidation(t *testing.T) {
-	t.Parallel()
-	for _, item := range testdataPermalinks {
-		pp := pathPattern(item.spec)
-		have := pp.validate()
-		if have == item.valid {
-			continue
-		}
-		var howBad string
-		if have {
-			howBad = "validates but should not have"
-		} else {
-			howBad = "should have validated but did not"
-		}
-		t.Errorf("permlink spec %q %s", item.spec, howBad)
-	}
-}
-
-func TestPermalinkExpansion(t *testing.T) {
-	t.Parallel()
-	s := newTestSite(t)
-	page, err := s.newPageFrom(strings.NewReader(simplePageJSON), filepath.FromSlash("blue/test-page.md"))
-
-	if err != nil {
-		t.Fatalf("failed before we began, could not parse simplePageJSON: %s", err)
-	}
-	for _, item := range testdataPermalinks {
-		if !item.valid {
-			continue
-		}
-		pp := pathPattern(item.spec)
-		result, err := pp.Expand(page)
-		if err != nil {
-			t.Errorf("failed to expand page: %s", err)
-			continue
-		}
-		if result != item.expandsTo {
-			t.Errorf("expansion mismatch!\n\tExpected: %q\n\tReceived: %q", item.expandsTo, result)
-		}
-	}
-}
diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go
index f53ab4966f0..e22121b7726 100644
--- a/hugolib/resource_chain_test.go
+++ b/hugolib/resource_chain_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -39,7 +39,7 @@ func TestSCSSWithIncludePaths(t *testing.T) {
 
 	v := viper.New()
 	v.Set("workingDir", workDir)
-	b := newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger())
+	b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
 	b.WithViper(v)
 	b.WithWorkingDir(workDir)
 	// Need to use OS fs for this.
@@ -94,7 +94,7 @@ func TestSCSSWithThemeOverrides(t *testing.T) {
 	v := viper.New()
 	v.Set("workingDir", workDir)
 	v.Set("theme", theme)
-	b := newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger())
+	b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
 	b.WithViper(v)
 	b.WithWorkingDir(workDir)
 	// Need to use OS fs for this.
@@ -367,7 +367,7 @@ CSV2: {{ $csv2 }}
 			continue
 		}
 
-		b := newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger())
+		b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
 		b.WithSimpleConfigFile()
 		b.WithContent("_index.md", `
 ---
diff --git a/hugolib/rss_test.go b/hugolib/rss_test.go
index db26c7d2d27..f5598bef220 100644
--- a/hugolib/rss_test.go
+++ b/hugolib/rss_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -47,7 +47,7 @@ func TestRSSOutput(t *testing.T) {
 	// Section RSS
 	th.assertFileContent(filepath.Join("public", "sect", rssURI), "<?xml", "rss version", "Sects on RSSTest")
 	// Taxonomy RSS
-	th.assertFileContent(filepath.Join("public", "categories", "hugo", rssURI), "<?xml", "rss version", "Hugo on RSSTest")
+	th.assertFileContent(filepath.Join("public", "categories", "hugo", rssURI), "<?xml", "rss version", "hugo on RSSTest")
 
 	// RSS Item Limit
 	content := readDestination(t, fs, filepath.Join("public", rssURI))
diff --git a/hugolib/shortcode.go b/hugolib/shortcode.go
index cd2f268f123..68455d30f46 100644
--- a/hugolib/shortcode.go
+++ b/hugolib/shortcode.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,12 +15,14 @@ package hugolib
 
 import (
 	"bytes"
-	"errors"
 	"fmt"
+	"strconv"
+
 	"html/template"
 	"path"
 
 	"github.com/gohugoio/hugo/common/herrors"
+	"github.com/pkg/errors"
 
 	"reflect"
 
@@ -28,6 +30,7 @@ import (
 	"sort"
 
 	"github.com/gohugoio/hugo/parser/pageparser"
+	"github.com/gohugoio/hugo/resources/page"
 
 	_errors "github.com/pkg/errors"
 
@@ -39,8 +42,6 @@ import (
 	"github.com/gohugoio/hugo/common/urls"
 	"github.com/gohugoio/hugo/output"
 
-	"github.com/gohugoio/hugo/media"
-
 	bp "github.com/gohugoio/hugo/bufferpool"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/tpl"
@@ -48,7 +49,7 @@ import (
 
 var (
 	_ urls.RefLinker  = (*ShortcodeWithPage)(nil)
-	_ pageContainer   = (*ShortcodeWithPage)(nil)
+	_ pageWrapper     = (*ShortcodeWithPage)(nil)
 	_ text.Positioner = (*ShortcodeWithPage)(nil)
 )
 
@@ -56,7 +57,7 @@ var (
 type ShortcodeWithPage struct {
 	Params        interface{}
 	Inner         template.HTML
-	Page          *PageWithoutContent
+	Page          page.Page
 	Parent        *ShortcodeWithPage
 	Name          string
 	IsNamedParams bool
@@ -77,26 +78,28 @@ type ShortcodeWithPage struct {
 // may be expensive to calculate, so only use this in error situations.
 func (scp *ShortcodeWithPage) Position() text.Position {
 	scp.posInit.Do(func() {
-		scp.pos = scp.Page.posFromPage(scp.posOffset)
+		if p, ok := mustUnwrapPage(scp.Page).(pageContext); ok {
+			scp.pos = p.posOffset(scp.posOffset)
+		}
 	})
 	return scp.pos
 }
 
 // Site returns information about the current site.
-func (scp *ShortcodeWithPage) Site() *SiteInfo {
-	return scp.Page.Site
+func (scp *ShortcodeWithPage) Site() page.Site {
+	return scp.Page.Site()
 }
 
 // Ref is a shortcut to the Ref method on Page. It passes itself as a context
 // to get better error messages.
 func (scp *ShortcodeWithPage) Ref(args map[string]interface{}) (string, error) {
-	return scp.Page.ref(args, scp)
+	return scp.Page.RefFrom(args, scp)
 }
 
 // RelRef is a shortcut to the RelRef method on Page. It passes itself as a context
 // to get better error messages.
 func (scp *ShortcodeWithPage) RelRef(args map[string]interface{}) (string, error) {
-	return scp.Page.relRef(args, scp)
+	return scp.Page.RelRefFrom(args, scp)
 }
 
 // Scratch returns a scratch-pad scoped for this shortcode. This can be used
@@ -159,12 +162,16 @@ func (scp *ShortcodeWithPage) Get(key interface{}) interface{} {
 
 }
 
-func (scp *ShortcodeWithPage) page() *Page {
-	return scp.Page.Page
+func (scp *ShortcodeWithPage) page() page.Page {
+	return scp.Page
 }
 
 // Note - this value must not contain any markup syntax
-const shortcodePlaceholderPrefix = "HUGOSHORTCODE"
+const shortcodePlaceholderPrefix = "HAHAHUGOSHORTCODE"
+
+func createShortcodePlaceholder(id string, ordinal int) string {
+	return shortcodePlaceholderPrefix + "-" + id + strconv.Itoa(ordinal) + "-HBHB"
+}
 
 type shortcode struct {
 	name      string
@@ -174,8 +181,24 @@ type shortcode struct {
 	params    interface{}   // map or array
 	ordinal   int
 	err       error
-	doMarkup  bool
-	pos       int // the position in bytes in the source file
+
+	info tpl.Info
+
+	// If set, the rendered shortcode is sent as part of the surrounding content
+	// to Blackfriday and similar.
+	// Before Hugo 0.55 we didn't send any shortcode output to the markup
+	// renderer, and this flag told Hugo to process the {{ .Inner }} content
+	// separately.
+	// The old behaviour can be had by starting your shortcode template with:
+	//    {{ $_hugo_config := `{ "version": 1 }`}}
+	doMarkup bool
+
+	// the placeholder in the source when passed to Blackfriday etc.
+	// This also identifies the rendered shortcode.
+	placeholder string
+
+	pos    int // the position in bytes in the source file
+	length int // the length in bytes in the source file
 }
 
 func (s shortcode) innerString() string {
@@ -214,193 +237,92 @@ func (sc shortcode) String() string {
 	return fmt.Sprintf("%s(%q, %t){%s}", sc.name, params, sc.doMarkup, sc.inner)
 }
 
-// We may have special shortcode templates for AMP etc.
-// Note that in the below, OutputFormat may be empty.
-// We will try to look for the most specific shortcode template available.
-type scKey struct {
-	Lang                 string
-	OutputFormat         string
-	Suffix               string
-	ShortcodePlaceholder string
-}
-
-func newScKey(m media.Type, shortcodeplaceholder string) scKey {
-	return scKey{Suffix: m.Suffix(), ShortcodePlaceholder: shortcodeplaceholder}
-}
-
-func newScKeyFromLangAndOutputFormat(lang string, o output.Format, shortcodeplaceholder string) scKey {
-	return scKey{Lang: lang, Suffix: o.MediaType.Suffix(), OutputFormat: o.Name, ShortcodePlaceholder: shortcodeplaceholder}
-}
-
-func newDefaultScKey(shortcodeplaceholder string) scKey {
-	return newScKey(media.HTMLType, shortcodeplaceholder)
-}
-
 type shortcodeHandler struct {
-	init sync.Once
-
-	p *PageWithoutContent
-
-	// This is all shortcode rendering funcs for all potential output formats.
-	contentShortcodes *orderedMap
+	p *pageState
 
-	// This map contains the new or changed set of shortcodes that need
-	// to be rendered for the current output format.
-	contentShortcodesDelta *orderedMap
+	s *Site
 
-	// This maps the shorcode placeholders with the rendered content.
-	// We will do (potential) partial re-rendering per output format,
-	// so keep this for the unchanged.
-	renderedShortcodes map[string]string
-
-	// Maps the shortcodeplaceholder with the actual shortcode.
-	shortcodes *orderedMap
+	// Ordered list of shortcodes for a page.
+	shortcodes []*shortcode
 
 	// All the shortcode names in this set.
 	nameSet map[string]bool
 
-	placeholderID   int
-	placeholderFunc func() string
-
+	// Configuration
 	enableInlineShortcodes bool
 }
 
-func (s *shortcodeHandler) nextPlaceholderID() int {
-	s.placeholderID++
-	return s.placeholderID
-}
+func newShortcodeHandler(p *pageState, s *Site, placeholderFunc func() string) *shortcodeHandler {
 
-func (s *shortcodeHandler) createShortcodePlaceholder() string {
-	return s.placeholderFunc()
-}
-
-func newShortcodeHandler(p *Page) *shortcodeHandler {
-
-	s := &shortcodeHandler{
-		p:                      p.withoutContent(),
-		enableInlineShortcodes: p.s.enableInlineShortcodes,
-		contentShortcodes:      newOrderedMap(),
-		shortcodes:             newOrderedMap(),
+	sh := &shortcodeHandler{
+		p:                      p,
+		s:                      s,
+		enableInlineShortcodes: s.enableInlineShortcodes,
+		shortcodes:             make([]*shortcode, 0, 4),
 		nameSet:                make(map[string]bool),
-		renderedShortcodes:     make(map[string]string),
-	}
-
-	placeholderFunc := p.s.shortcodePlaceholderFunc
-	if placeholderFunc == nil {
-		placeholderFunc = func() string {
-			return fmt.Sprintf("HAHA%s-%p-%d-HBHB", shortcodePlaceholderPrefix, p, s.nextPlaceholderID())
-		}
-
-	}
-	s.placeholderFunc = placeholderFunc
-	return s
-}
-
-// TODO(bep) make it non-global
-var isInnerShortcodeCache = struct {
-	sync.RWMutex
-	m map[string]bool
-}{m: make(map[string]bool)}
-
-// to avoid potential costly look-aheads for closing tags we look inside the template itself
-// we could change the syntax to self-closing tags, but that would make users cry
-// the value found is cached
-func isInnerShortcode(t tpl.TemplateExecutor) (bool, error) {
-	isInnerShortcodeCache.RLock()
-	m, ok := isInnerShortcodeCache.m[t.Name()]
-	isInnerShortcodeCache.RUnlock()
-
-	if ok {
-		return m, nil
 	}
 
-	isInnerShortcodeCache.Lock()
-	defer isInnerShortcodeCache.Unlock()
-	match, _ := regexp.MatchString("{{.*?\\.Inner.*?}}", t.Tree())
-	isInnerShortcodeCache.m[t.Name()] = match
-
-	return match, nil
-}
-
-func clearIsInnerShortcodeCache() {
-	isInnerShortcodeCache.Lock()
-	defer isInnerShortcodeCache.Unlock()
-	isInnerShortcodeCache.m = make(map[string]bool)
+	return sh
 }
 
-const innerNewlineRegexp = "\n"
-const innerCleanupRegexp = `\A<p>(.*)</p>\n\z`
-const innerCleanupExpand = "$1"
-
-func (s *shortcodeHandler) prepareShortcodeForPage(placeholder string, sc *shortcode, parent *ShortcodeWithPage, p *PageWithoutContent) map[scKey]func() (string, error) {
-	m := make(map[scKey]func() (string, error))
-	lang := p.Lang()
-
-	if sc.isInline {
-		key := newScKeyFromLangAndOutputFormat(lang, p.outputFormats[0], placeholder)
-		m[key] = func() (string, error) {
-			return renderShortcode(key, sc, nil, p)
-
-		}
-
-		return m
-
-	}
-
-	for _, f := range p.outputFormats {
-		// The most specific template will win.
-		key := newScKeyFromLangAndOutputFormat(lang, f, placeholder)
-		m[key] = func() (string, error) {
-			return renderShortcode(key, sc, nil, p)
-		}
-	}
-
-	return m
-}
+const (
+	innerNewlineRegexp = "\n"
+	innerCleanupRegexp = `\A<p>(.*)</p>\n\z`
+	innerCleanupExpand = "$1"
+)
 
 func renderShortcode(
-	tmplKey scKey,
+	level int,
+	s *Site,
+	tplVariants tpl.TemplateVariants,
 	sc *shortcode,
 	parent *ShortcodeWithPage,
-	p *PageWithoutContent) (string, error) {
+	p *pageState) (string, bool, error) {
 
 	var tmpl tpl.Template
 
+	// Tracks whether this shortcode or any of its children has template variations
+	// in other languages or output formats. We are currently only interested in
+	// the output formats, so we may get some false positives -- we
+	// should improve on that.
+	var hasVariants bool
+
 	if sc.isInline {
 		if !p.s.enableInlineShortcodes {
-			return "", nil
+			return "", false, nil
 		}
-		templName := path.Join("_inline_shortcode", p.Path(), sc.name)
+		templName := path.Join("_inline_shortcode", p.File().Path(), sc.name)
 		if sc.isClosing {
 			templStr := sc.innerString()
 
 			var err error
-			tmpl, err = p.s.TextTmpl.Parse(templName, templStr)
+			tmpl, err = s.TextTmpl.Parse(templName, templStr)
 			if err != nil {
 				fe := herrors.ToFileError("html", err)
-				l1, l2 := p.posFromPage(sc.pos).LineNumber, fe.Position().LineNumber
+				l1, l2 := p.posOffset(sc.pos).LineNumber, fe.Position().LineNumber
 				fe = herrors.ToFileErrorWithLineNumber(fe, l1+l2-1)
-				return "", p.errWithFileContext(fe)
+				return "", false, p.wrapError(fe)
 			}
 
 		} else {
 			// Re-use of shortcode defined earlier in the same page.
 			var found bool
-			tmpl, found = p.s.TextTmpl.Lookup(templName)
+			tmpl, found = s.TextTmpl.Lookup(templName)
 			if !found {
-				return "", _errors.Errorf("no earlier definition of shortcode %q found", sc.name)
+				return "", false, _errors.Errorf("no earlier definition of shortcode %q found", sc.name)
 			}
 		}
 	} else {
-		tmpl = getShortcodeTemplateForTemplateKey(tmplKey, sc.name, p.s.Tmpl)
-	}
-
-	if tmpl == nil {
-		p.s.Log.ERROR.Printf("Unable to locate template for shortcode %q in page %q", sc.name, p.Path())
-		return "", nil
+		var found, more bool
+		tmpl, found, more = s.Tmpl.LookupVariant(sc.name, tplVariants)
+		if !found {
+			s.Log.ERROR.Printf("Unable to locate template for shortcode %q in page %q", sc.name, p.File().Path())
+			return "", false, nil
+		}
+		hasVariants = hasVariants || more
 	}
 
-	data := &ShortcodeWithPage{Ordinal: sc.ordinal, posOffset: sc.pos, Params: sc.params, Page: p, Parent: parent, Name: sc.name}
+	data := &ShortcodeWithPage{Ordinal: sc.ordinal, posOffset: sc.pos, Params: sc.params, Page: newPageForShortcode(p), Parent: parent, Name: sc.name}
 	if sc.params != nil {
 		data.IsNamedParams = reflect.TypeOf(sc.params).Kind() == reflect.Map
 	}
@@ -408,32 +330,35 @@ func renderShortcode(
 	if len(sc.inner) > 0 {
 		var inner string
 		for _, innerData := range sc.inner {
-			switch innerData.(type) {
+			switch innerData := innerData.(type) {
 			case string:
-				inner += innerData.(string)
+				inner += innerData
 			case *shortcode:
-				s, err := renderShortcode(tmplKey, innerData.(*shortcode), data, p)
+				s, more, err := renderShortcode(level+1, s, tplVariants, innerData, data, p)
 				if err != nil {
-					return "", err
+					return "", false, err
 				}
+				hasVariants = hasVariants || more
 				inner += s
 			default:
-				p.s.Log.ERROR.Printf("Illegal state on shortcode rendering of %q in page %q. Illegal type in inner data: %s ",
-					sc.name, p.Path(), reflect.TypeOf(innerData))
-				return "", nil
+				s.Log.ERROR.Printf("Illegal state on shortcode rendering of %q in page %q. Illegal type in inner data: %s ",
+					sc.name, p.File().Path(), reflect.TypeOf(innerData))
+				return "", false, nil
 			}
 		}
 
-		if sc.doMarkup {
-			newInner := p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
+		// Pre Hugo 0.55 this was the behaviour even for the outer-most
+		// shortcode.
+		if sc.doMarkup && (level > 0 || sc.info.Config.Version == 1) {
+			newInner := s.ContentSpec.RenderBytes(&helpers.RenderingContext{
 				Content:      []byte(inner),
-				PageFmt:      p.Markup,
+				PageFmt:      p.m.markup,
 				Cfg:          p.Language(),
-				DocumentID:   p.UniqueID(),
-				DocumentName: p.Path(),
+				DocumentID:   p.File().UniqueID(),
+				DocumentName: p.File().Path(),
 				Config:       p.getRenderingConfig()})
 
-			// If the type is “unknown” or “markdown”, we assume the markdown
+			// If the type is “” (unknown) or “markdown”, we assume the markdown
 			// generation has been performed. Given the input: `a line`, markdown
 			// specifies the HTML `<p>a line</p>\n`. When dealing with documents as a
 			// whole, this is OK. When dealing with an `{{ .Inner }}` block in Hugo,
@@ -442,12 +367,9 @@ func renderShortcode(
 			// 1.  Check to see if inner has a newline in it. If so, the Inner data is
 			//     unchanged.
 			// 2   If inner does not have a newline, strip the wrapping <p> block and
-			//     the newline. This was previously tricked out by wrapping shortcode
-			//     substitutions in <div>HUGOSHORTCODE-1</div> which prevents the
-			//     generation, but means that you can’t use shortcodes inside of
-			//     markdown structures itself (e.g., `[foo]({{% ref foo.md %}})`).
-			switch p.Markup {
-			case "unknown", "markdown":
+			//     the newline.
+			switch p.m.markup {
+			case "", "markdown":
 				if match, _ := regexp.MatchString(innerNewlineRegexp, inner); !match {
 					cleaner, err := regexp.Compile(innerCleanupRegexp)
 
@@ -465,147 +387,71 @@ func renderShortcode(
 
 	}
 
-	s, err := renderShortcodeWithPage(tmpl, data)
+	result, err := renderShortcodeWithPage(tmpl, data)
 
 	if err != nil && sc.isInline {
 		fe := herrors.ToFileError("html", err)
 		l1, l2 := p.posFromPage(sc.pos).LineNumber, fe.Position().LineNumber
 		fe = herrors.ToFileErrorWithLineNumber(fe, l1+l2-1)
-		return "", fe
-	}
-
-	return s, err
-}
-
-// The delta represents new output format-versions of the shortcodes,
-// which, combined with the ones that do not have alternative representations,
-// builds a complete set ready for a full rebuild of the Page content.
-// This method returns false if there are no new shortcode variants in the
-// current rendering context's output format. This mean we can safely reuse
-// the content from the previous output format, if any.
-func (s *shortcodeHandler) updateDelta() bool {
-	s.init.Do(func() {
-		s.contentShortcodes = s.createShortcodeRenderers(s.p.withoutContent())
-	})
-
-	if !s.p.shouldRenderTo(s.p.s.rc.Format) {
-		// TODO(bep) add test for this re translations
-		return false
+		return "", false, fe
 	}
-	of := s.p.s.rc.Format
-	contentShortcodes := s.contentShortcodesForOutputFormat(of)
 
-	if s.contentShortcodesDelta == nil || s.contentShortcodesDelta.Len() == 0 {
-		s.contentShortcodesDelta = contentShortcodes
-		return true
-	}
-
-	delta := newOrderedMap()
-
-	for _, k := range contentShortcodes.Keys() {
-		if !s.contentShortcodesDelta.Contains(k) {
-			v, _ := contentShortcodes.Get(k)
-			delta.Add(k, v)
-		}
-	}
-
-	s.contentShortcodesDelta = delta
-
-	return delta.Len() > 0
+	return result, hasVariants, err
 }
 
-func (s *shortcodeHandler) clearDelta() {
-	if s == nil {
-		return
-	}
-	s.contentShortcodesDelta = newOrderedMap()
+func (s *shortcodeHandler) hasShortcodes() bool {
+	return len(s.shortcodes) > 0
 }
 
-func (s *shortcodeHandler) contentShortcodesForOutputFormat(f output.Format) *orderedMap {
-	contentShortcodesForOuputFormat := newOrderedMap()
-	lang := s.p.Lang()
-
-	for _, key := range s.shortcodes.Keys() {
-		shortcodePlaceholder := key.(string)
+func (s *shortcodeHandler) renderShortcodesForPage(p *pageState, f output.Format) (map[string]string, bool, error) {
 
-		key := newScKeyFromLangAndOutputFormat(lang, f, shortcodePlaceholder)
-		renderFn, found := s.contentShortcodes.Get(key)
-
-		if !found {
-			key.OutputFormat = ""
-			renderFn, found = s.contentShortcodes.Get(key)
-		}
-
-		// Fall back to HTML
-		if !found && key.Suffix != "html" {
-			key.Suffix = "html"
-			renderFn, found = s.contentShortcodes.Get(key)
-			if !found {
-				key.OutputFormat = "HTML"
-				renderFn, found = s.contentShortcodes.Get(key)
-			}
-		}
+	rendered := make(map[string]string)
 
-		if !found {
-			panic(fmt.Sprintf("Shortcode %q could not be found", shortcodePlaceholder))
-		}
-		contentShortcodesForOuputFormat.Add(newScKeyFromLangAndOutputFormat(lang, f, shortcodePlaceholder), renderFn)
+	tplVariants := tpl.TemplateVariants{
+		Language:     p.Language().Lang,
+		OutputFormat: f,
 	}
 
-	return contentShortcodesForOuputFormat
-}
-
-func (s *shortcodeHandler) executeShortcodesForDelta(p *PageWithoutContent) error {
+	var hasVariants bool
 
-	for _, k := range s.contentShortcodesDelta.Keys() {
-		render := s.contentShortcodesDelta.getShortcodeRenderer(k)
-		renderedShortcode, err := render()
+	for _, v := range s.shortcodes {
+		s, more, err := renderShortcode(0, s.s, tplVariants, v, nil, p)
 		if err != nil {
-			sc := s.shortcodes.getShortcode(k.(scKey).ShortcodePlaceholder)
-			if sc != nil {
-				err = p.errWithFileContext(p.parseError(_errors.Wrapf(err, "failed to render shortcode %q", sc.name), p.source.parsed.Input(), sc.pos))
-			}
-
-			p.s.SendError(err)
-			continue
+			err = p.parseError(_errors.Wrapf(err, "failed to render shortcode %q", v.name), p.source.parsed.Input(), v.pos)
+			return nil, false, err
 		}
+		hasVariants = hasVariants || more
+		rendered[v.placeholder] = s
 
-		s.renderedShortcodes[k.(scKey).ShortcodePlaceholder] = renderedShortcode
 	}
 
-	return nil
-
+	return rendered, hasVariants, nil
 }
 
-func (s *shortcodeHandler) createShortcodeRenderers(p *PageWithoutContent) *orderedMap {
-
-	shortcodeRenderers := newOrderedMap()
+var errShortCodeIllegalState = errors.New("Illegal shortcode state")
 
-	for _, k := range s.shortcodes.Keys() {
-		v := s.shortcodes.getShortcode(k)
-		prepared := s.prepareShortcodeForPage(k.(string), v, nil, p)
-		for kk, vv := range prepared {
-			shortcodeRenderers.Add(kk, vv)
-		}
+func (s *shortcodeHandler) parseError(err error, input []byte, pos int) error {
+	if s.p != nil {
+		return s.p.parseError(err, input, pos)
 	}
-
-	return shortcodeRenderers
+	return err
 }
 
-var errShortCodeIllegalState = errors.New("Illegal shortcode state")
-
 // pageTokens state:
 // - before: positioned just before the shortcode start
 // - after: shortcode(s) consumed (plural when they are nested)
-func (s *shortcodeHandler) extractShortcode(ordinal int, pt *pageparser.Iterator, p *Page) (*shortcode, error) {
+func (s *shortcodeHandler) extractShortcode(ordinal, level int, pt *pageparser.Iterator) (*shortcode, error) {
+	if s == nil {
+		panic("handler nil")
+	}
 	sc := &shortcode{ordinal: ordinal}
-	var isInner = false
 
 	var cnt = 0
 	var nestedOrdinal = 0
+	var nextLevel = level + 1
 
 	fail := func(err error, i pageparser.Item) error {
-		return p.parseError(err, pt.Input(), i.Pos)
+		return s.parseError(err, pt.Input(), i.Pos)
 	}
 
 Loop:
@@ -613,9 +459,6 @@ Loop:
 		currItem := pt.Next()
 		switch {
 		case currItem.IsLeftShortcodeDelim():
-			if sc.pos == 0 {
-				sc.pos = currItem.Pos
-			}
 			next := pt.Peek()
 			if next.IsShortcodeClose() {
 				continue
@@ -624,7 +467,7 @@ Loop:
 			if cnt > 0 {
 				// nested shortcode; append it to inner content
 				pt.Backup()
-				nested, err := s.extractShortcode(nestedOrdinal, pt, p)
+				nested, err := s.extractShortcode(nestedOrdinal, nextLevel, pt)
 				nestedOrdinal++
 				if nested.name != "" {
 					s.nameSet[nested.name] = true
@@ -644,13 +487,13 @@ Loop:
 		case currItem.IsRightShortcodeDelim():
 			// we trust the template on this:
 			// if there's no inner, we're done
-			if !sc.isInline && !isInner {
+			if !sc.isInline && !sc.info.IsInner {
 				return sc, nil
 			}
 
 		case currItem.IsShortcodeClose():
 			next := pt.Peek()
-			if !sc.isInline && !isInner {
+			if !sc.isInline && !sc.info.IsInner {
 				if next.IsError() {
 					// return that error, more specific
 					continue
@@ -670,24 +513,21 @@ Loop:
 		case currItem.IsText():
 			sc.inner = append(sc.inner, currItem.ValStr())
 		case currItem.IsShortcodeName():
+
 			sc.name = currItem.ValStr()
+
+			// Check if the template expects inner content.
 			// We pick the first template for an arbitrary output format
 			// if more than one. It is "all inner or no inner".
-			tmpl := getShortcodeTemplateForTemplateKey(scKey{}, sc.name, p.s.Tmpl)
-			if tmpl == nil {
-				return sc, fail(_errors.Errorf("template for shortcode %q not found", sc.name), currItem)
-			}
-
-			var err error
-			isInner, err = isInnerShortcode(tmpl.(tpl.TemplateExecutor))
-			if err != nil {
-				return sc, fail(_errors.Wrapf(err, "failed to handle template for shortcode %q", sc.name), currItem)
+			tmpl, found, _ := s.s.Tmpl.LookupVariant(sc.name, tpl.TemplateVariants{})
+			if !found {
+				return nil, _errors.Errorf("template for shortcode %q not found", sc.name)
 			}
 
+			sc.info = tmpl.(tpl.TemplateInfoProvider).TemplateInfo()
 		case currItem.IsInlineShortcodeName():
 			sc.name = currItem.ValStr()
 			sc.isInline = true
-
 		case currItem.IsShortcodeParam():
 			if !pt.IsValueNext() {
 				continue
@@ -721,7 +561,6 @@ Loop:
 
 				}
 			}
-
 		case currItem.IsDone():
 			// handled by caller
 			pt.Backup()
@@ -732,11 +571,9 @@ Loop:
 	return sc, nil
 }
 
-var shortCodeStart = []byte("{{")
-
-// Replace prefixed shortcode tokens (HUGOSHORTCODE-1, HUGOSHORTCODE-2) with the real content.
+// Replace prefixed shortcode tokens with the real content.
 // Note: This function will rewrite the input slice.
-func replaceShortcodeTokens(source []byte, prefix string, replacements map[string]string) ([]byte, error) {
+func replaceShortcodeTokens(source []byte, replacements map[string]string) ([]byte, error) {
 
 	if len(replacements) == 0 {
 		return source, nil
@@ -744,7 +581,7 @@ func replaceShortcodeTokens(source []byte, prefix string, replacements map[strin
 
 	start := 0
 
-	pre := []byte("HAHA" + prefix)
+	pre := []byte(shortcodePlaceholderPrefix)
 	post := []byte("HBHB")
 	pStart := []byte("<p>")
 	pEnd := []byte("</p>")
@@ -781,54 +618,11 @@ func replaceShortcodeTokens(source []byte, prefix string, replacements map[strin
 	return source, nil
 }
 
-func getShortcodeTemplateForTemplateKey(key scKey, shortcodeName string, t tpl.TemplateFinder) tpl.Template {
-	isInnerShortcodeCache.RLock()
-	defer isInnerShortcodeCache.RUnlock()
-
-	var names []string
-
-	suffix := strings.ToLower(key.Suffix)
-	outFormat := strings.ToLower(key.OutputFormat)
-	lang := strings.ToLower(key.Lang)
-
-	if outFormat != "" && suffix != "" {
-		if lang != "" {
-			names = append(names, fmt.Sprintf("%s.%s.%s.%s", shortcodeName, lang, outFormat, suffix))
-		}
-		names = append(names, fmt.Sprintf("%s.%s.%s", shortcodeName, outFormat, suffix))
-	}
-
-	if suffix != "" {
-		if lang != "" {
-			names = append(names, fmt.Sprintf("%s.%s.%s", shortcodeName, lang, suffix))
-		}
-		names = append(names, fmt.Sprintf("%s.%s", shortcodeName, suffix))
-	}
-
-	names = append(names, shortcodeName)
-
-	for _, name := range names {
-
-		if x, found := t.Lookup("shortcodes/" + name); found {
-			return x
-		}
-		if x, found := t.Lookup("theme/shortcodes/" + name); found {
-			return x
-		}
-		if x, found := t.Lookup("_internal/shortcodes/" + name); found {
-			return x
-		}
-	}
-	return nil
-}
-
 func renderShortcodeWithPage(tmpl tpl.Template, data *ShortcodeWithPage) (string, error) {
 	buffer := bp.GetBuffer()
 	defer bp.PutBuffer(buffer)
 
-	isInnerShortcodeCache.RLock()
 	err := tmpl.Execute(buffer, data)
-	isInnerShortcodeCache.RUnlock()
 	if err != nil {
 		return "", _errors.Wrap(err, "failed to process shortcode")
 	}
diff --git a/hugolib/shortcode_page.go b/hugolib/shortcode_page.go
new file mode 100644
index 00000000000..e8a3a37e19b
--- /dev/null
+++ b/hugolib/shortcode_page.go
@@ -0,0 +1,56 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"html/template"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+var tocShortcodePlaceholder = createShortcodePlaceholder("TOC", 0)
+
+// This is sent to the shortcodes. They cannot access the content
+// they're a part of. It would cause an infinite regress.
+//
+// Go doesn't support virtual methods, so this careful dance is currently (I think)
+// the best we can do.
+type pageForShortcode struct {
+	page.PageWithoutContent
+	page.ContentProvider
+
+	// We need to replace it after we have rendered it, so provide a
+	// temporary placeholder.
+	toc template.HTML
+
+	p *pageState
+}
+
+func newPageForShortcode(p *pageState) page.Page {
+	return &pageForShortcode{
+		PageWithoutContent: p,
+		ContentProvider:    page.NopPage,
+		toc:                template.HTML(tocShortcodePlaceholder),
+		p:                  p,
+	}
+}
+
+func (p *pageForShortcode) page() page.Page {
+	return p.PageWithoutContent.(page.Page)
+}
+
+func (p *pageForShortcode) TableOfContents() template.HTML {
+	p.p.enablePlaceholders()
+	return p.toc
+}
diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go
index 16ff0b7806b..b76a586daae 100644
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -16,9 +16,13 @@ package hugolib
 import (
 	"fmt"
 	"path/filepath"
-	"reflect"
 	"regexp"
-	"sort"
+
+	"reflect"
+
+	"github.com/gohugoio/hugo/parser/pageparser"
+	"github.com/gohugoio/hugo/resources/page"
+
 	"strings"
 	"testing"
 
@@ -26,34 +30,14 @@ import (
 
 	"github.com/spf13/afero"
 
-	"github.com/gohugoio/hugo/output"
-
-	"github.com/gohugoio/hugo/media"
-
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/tpl"
+	"github.com/spf13/cast"
 
 	"github.com/stretchr/testify/require"
 )
 
-// TODO(bep) remove
-func pageFromString(in, filename string, shortcodePlaceholderFn func() string, withTemplate ...func(templ tpl.TemplateHandler) error) (*Page, error) {
-	var err error
-	cfg, fs := newTestCfg()
-
-	d := deps.DepsCfg{Cfg: cfg, Fs: fs, WithTemplate: withTemplate[0]}
-
-	s, err := NewSiteForCfg(d)
-	if err != nil {
-		return nil, err
-	}
-
-	s.shortcodePlaceholderFunc = shortcodePlaceholderFn
-
-	return s.newPageFrom(strings.NewReader(in), filename)
-}
-
 func CheckShortCodeMatch(t *testing.T, input, expected string, withTemplate func(templ tpl.TemplateHandler) error) {
 	CheckShortCodeMatchAndError(t, input, expected, withTemplate, false)
 }
@@ -85,16 +69,16 @@ title: "Title"
 		t.Fatalf("No error from shortcode")
 	}
 
-	require.Len(t, h.Sites[0].RegularPages, 1)
+	require.Len(t, h.Sites[0].RegularPages(), 1)
 
-	output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].content()))
+	output := strings.TrimSpace(content(h.Sites[0].RegularPages()[0]))
 	output = strings.TrimPrefix(output, "<p>")
 	output = strings.TrimSuffix(output, "</p>")
 
 	expected = strings.TrimSpace(expected)
 
 	if output != expected {
-		t.Fatalf("Shortcode render didn't match. got \n%q but expected \n%q", output, expected)
+		Fatalf(t, "Shortcode render didn't match. got \n%q but expected \n%q", output, expected)
 	}
 }
 
@@ -161,6 +145,30 @@ func TestShortcodeRelated(t *testing.T) {
 	CheckShortCodeMatch(t, "{{< a >}}", "0", wt)
 }
 
+func TestShortcodeInnerMarkup(t *testing.T) {
+	t.Parallel()
+	wt := func(tem tpl.TemplateHandler) error {
+		tem.AddTemplate("shortcodes/a.html", `<div>{{ .Inner }}</div>`)
+		tem.AddTemplate("shortcodes/b.html", `**Bold**: <div>{{ .Inner }}</div>`)
+		return nil
+	}
+
+	CheckShortCodeMatch(t,
+		"{{< a >}}B: <div>{{% b %}}**Bold**{{% /b %}}</div>{{< /a >}}",
+		// This assertion looks odd, but is correct: for inner shortcodes using
+		// the {{% delimiter we treat the .Inner content as markup, but not the
+		// shortcode itself.
+		"<div>B: <div>**Bold**: <div><strong>Bold</strong></div></div></div>",
+		wt)
+
+	CheckShortCodeMatch(t,
+		"{{% b %}}This is **B**: {{< b >}}This is B{{< /b>}}{{% /b %}}",
+		"<strong>Bold</strong>: <div>This is <strong>B</strong>: <strong>Bold</strong>: <div>This is B</div></div>",
+		wt)
+}
+
+// TODO(bep) page   Pages         kubernetes   | 2739 | 286 | 298 |  71 |  47 |  14   vs
+
 // some repro issues for panics in Go Fuzz testing
 
 func TestNamedParamSC(t *testing.T) {
@@ -188,7 +196,7 @@ func TestNestedNamedMissingParam(t *testing.T) {
 	}
 	CheckShortCodeMatch(t,
 		`{{% acc %}}{{% div %}}d1{{% /div %}}{{% div2 %}}d2{{% /div2 %}}{{% /acc %}}`,
-		"<div class=\"acc\"><div >d1</div><div >d2</div>\n</div>", wt)
+		"<div class=\"acc\"><div >d1</div><div >d2</div></div>", wt)
 }
 
 func TestIsNamedParamsSC(t *testing.T) {
@@ -218,39 +226,18 @@ func TestInnerSC(t *testing.T) {
 func TestInnerSCWithMarkdown(t *testing.T) {
 	t.Parallel()
 	wt := func(tem tpl.TemplateHandler) error {
-		tem.AddTemplate("_internal/shortcodes/inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
+		// Note: In Hugo 0.55 we made it so any outer {{%'s inner content was rendered as part of the surrounding
+		// markup. This solved lots of problems, but it also meant that this test had to be adjusted.
+		tem.AddTemplate("_internal/shortcodes/wrapper.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
+		tem.AddTemplate("_internal/shortcodes/inside.html", `{{ .Inner }}`)
 		return nil
 	}
-	CheckShortCodeMatch(t, `{{% inside %}}
+	CheckShortCodeMatch(t, `{{< wrapper >}}{{% inside %}}
 # More Here
 
 [link](http://spf13.com) and text
 
-{{% /inside %}}`, "<div><h1 id=\"more-here\">More Here</h1>\n\n<p><a href=\"http://spf13.com\">link</a> and text</p>\n</div>", wt)
-}
-
-func TestInnerSCWithAndWithoutMarkdown(t *testing.T) {
-	t.Parallel()
-	wt := func(tem tpl.TemplateHandler) error {
-		tem.AddTemplate("_internal/shortcodes/inside.html", `<div{{with .Get "class"}} class="{{.}}"{{end}}>{{ .Inner }}</div>`)
-		return nil
-	}
-	CheckShortCodeMatch(t, `{{% inside %}}
-# More Here
-
-[link](http://spf13.com) and text
-
-{{% /inside %}}
-
-And then:
-
-{{< inside >}}
-# More Here
-
-This is **plain** text.
-
-{{< /inside >}}
-`, "<div><h1 id=\"more-here\">More Here</h1>\n\n<p><a href=\"http://spf13.com\">link</a> and text</p>\n</div>\n\n<p>And then:</p>\n\n<div>\n# More Here\n\nThis is **plain** text.\n\n</div>", wt)
+{{% /inside %}}{{< /wrapper >}}`, "<div><h1 id=\"more-here\">More Here</h1>\n\n<p><a href=\"http://spf13.com\">link</a> and text</p>\n</div>", wt)
 }
 
 func TestEmbeddedSC(t *testing.T) {
@@ -266,7 +253,7 @@ func TestNestedSC(t *testing.T) {
 		tem.AddTemplate("_internal/shortcodes/scn2.html", `<div>SC2</div>`)
 		return nil
 	}
-	CheckShortCodeMatch(t, `{{% scn1 %}}{{% scn2 %}}{{% /scn1 %}}`, "<div>Outer, inner is <div>SC2</div>\n</div>", wt)
+	CheckShortCodeMatch(t, `{{% scn1 %}}{{% scn2 %}}{{% /scn1 %}}`, "<div>Outer, inner is <div>SC2</div></div>", wt)
 
 	CheckShortCodeMatch(t, `{{< scn1 >}}{{% scn2 %}}{{< /scn1 >}}`, "<div>Outer, inner is <div>SC2</div></div>", wt)
 }
@@ -355,136 +342,100 @@ func TestShortcodeWrappedInPIssue(t *testing.T) {
 `, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n\nxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", wt)
 }
 
-const testScPlaceholderRegexp = "HAHAHUGOSHORTCODE-\\d+HBHB"
-
 func TestExtractShortcodes(t *testing.T) {
 	t.Parallel()
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	b.WithTemplates(
+		"default/single.html", `EMPTY`,
+		"_internal/shortcodes/tag.html", `tag`,
+		"_internal/shortcodes/legacytag.html", `{{ $_hugo_config := "{ \"version\": 1 }" }}tag`,
+		"_internal/shortcodes/sc1.html", `sc1`,
+		"_internal/shortcodes/sc2.html", `sc2`,
+		"_internal/shortcodes/inner.html", `{{with .Inner }}{{ . }}{{ end }}`,
+		"_internal/shortcodes/inner2.html", `{{.Inner}}`,
+		"_internal/shortcodes/inner3.html", `{{.Inner}}`,
+	).WithContent("page.md", `---
+title: "Shortcodes Galore!"
+---
+`)
 
-	for i, this := range []struct {
-		name             string
-		input            string
-		expectShortCodes string
-		expect           interface{}
-		expectErrorMsg   string
-	}{
-		{"text", "Some text.", "map[]", "Some text.", ""},
-		{"invalid right delim", "{{< tag }}", "", false, "unrecognized character"},
-		{"invalid close", "\n{{< /tag >}}", "", false, "got closing shortcode, but none is open"},
-		{"invalid close2", "\n\n{{< tag >}}{{< /anotherTag >}}", "", false, "closing tag for shortcode 'anotherTag' does not match start tag"},
-		{"unterminated quote 1", `{{< figure src="im caption="S" >}}`, "", false, "got pos"},
-		{"unterminated quote 1", `{{< figure src="im" caption="S >}}`, "", false, "unterm"},
-		{"one shortcode, no markup", "{{< tag >}}", "", testScPlaceholderRegexp, ""},
-		{"one shortcode, markup", "{{% tag %}}", "", testScPlaceholderRegexp, ""},
-		{"one pos param", "{{% tag param1 %}}", `tag([\"param1\"], true){[]}"]`, testScPlaceholderRegexp, ""},
-		{"two pos params", "{{< tag param1 param2>}}", `tag([\"param1\" \"param2\"], false){[]}"]`, testScPlaceholderRegexp, ""},
-		{"one named param", `{{% tag param1="value" %}}`, `tag([\"param1:value\"], true){[]}`, testScPlaceholderRegexp, ""},
-		{"two named params", `{{< tag param1="value1" param2="value2" >}}`, `tag([\"param1:value1\" \"param2:value2\"], false){[]}"]`,
-			testScPlaceholderRegexp, ""},
-		{"inner", `Some text. {{< inner >}}Inner Content{{< / inner >}}. Some more text.`, `inner([], false){[Inner Content]}`,
-			fmt.Sprintf("Some text. %s. Some more text.", testScPlaceholderRegexp), ""},
-		// issue #934
-		{"inner self-closing", `Some text. {{< inner />}}. Some more text.`, `inner([], false){[]}`,
-			fmt.Sprintf("Some text. %s. Some more text.", testScPlaceholderRegexp), ""},
-		{"close, but not inner", "{{< tag >}}foo{{< /tag >}}", "", false, `shortcode "tag" has no .Inner, yet a closing tag was provided`},
-		{"nested inner", `Inner->{{< inner >}}Inner Content->{{% inner2 param1 %}}inner2txt{{% /inner2 %}}Inner close->{{< / inner >}}<-done`,
-			`inner([], false){[Inner Content-> inner2([\"param1\"], true){[inner2txt]} Inner close->]}`,
-			fmt.Sprintf("Inner->%s<-done", testScPlaceholderRegexp), ""},
-		{"nested, nested inner", `Inner->{{< inner >}}inner2->{{% inner2 param1 %}}inner2txt->inner3{{< inner3>}}inner3txt{{</ inner3 >}}{{% /inner2 %}}final close->{{< / inner >}}<-done`,
-			`inner([], false){[inner2-> inner2([\"param1\"], true){[inner2txt->inner3 inner3(%!q(<nil>), false){[inner3txt]}]} final close->`,
-			fmt.Sprintf("Inner->%s<-done", testScPlaceholderRegexp), ""},
-		{"two inner", `Some text. {{% inner %}}First **Inner** Content{{% / inner %}} {{< inner >}}Inner **Content**{{< / inner >}}. Some more text.`,
-			`map["HAHAHUGOSHORTCODE-1HBHB:inner([], true){[First **Inner** Content]}" "HAHAHUGOSHORTCODE-2HBHB:inner([], false){[Inner **Content**]}"]`,
-			fmt.Sprintf("Some text. %s %s. Some more text.", testScPlaceholderRegexp, testScPlaceholderRegexp), ""},
-		{"closed without content", `Some text. {{< inner param1 >}}{{< / inner >}}. Some more text.`, `inner([\"param1\"], false){[]}`,
-			fmt.Sprintf("Some text. %s. Some more text.", testScPlaceholderRegexp), ""},
-		{"two shortcodes", "{{< sc1 >}}{{< sc2 >}}",
-			`map["HAHAHUGOSHORTCODE-1HBHB:sc1([], false){[]}" "HAHAHUGOSHORTCODE-2HBHB:sc2([], false){[]}"]`,
-			testScPlaceholderRegexp + testScPlaceholderRegexp, ""},
-		{"mix of shortcodes", `Hello {{< sc1 >}}world{{% sc2 p2="2"%}}. And that's it.`,
-			`map["HAHAHUGOSHORTCODE-1HBHB:sc1([], false){[]}" "HAHAHUGOSHORTCODE-2HBHB:sc2([\"p2:2\"]`,
-			fmt.Sprintf("Hello %sworld%s. And that's it.", testScPlaceholderRegexp, testScPlaceholderRegexp), ""},
-		{"mix with inner", `Hello {{< sc1 >}}world{{% inner p2="2"%}}Inner{{%/ inner %}}. And that's it.`,
-			`map["HAHAHUGOSHORTCODE-1HBHB:sc1([], false){[]}" "HAHAHUGOSHORTCODE-2HBHB:inner([\"p2:2\"], true){[Inner]}"]`,
-			fmt.Sprintf("Hello %sworld%s. And that's it.", testScPlaceholderRegexp, testScPlaceholderRegexp), ""},
-	} {
+	b.CreateSites().Build(BuildCfg{})
 
-		pageInput := simplePage + this.input
+	s := b.H.Sites[0]
 
-		counter := 0
-		placeholderFunc := func() string {
-			counter++
-			return fmt.Sprintf("HAHA%s-%dHBHB", shortcodePlaceholderPrefix, counter)
+	/*errCheck := func(s string) func(name string, assert *require.Assertions, shortcode *shortcode, err error) {
+		return func(name string, assert *require.Assertions, shortcode *shortcode, err error) {
+			assert.Error(err, name)
+			assert.Equal(s, err.Error(), name)
 		}
+	}*/
 
-		p, err := pageFromString(pageInput, "simple.md", placeholderFunc, func(templ tpl.TemplateHandler) error {
-			templ.AddTemplate("_internal/shortcodes/tag.html", `tag`)
-			templ.AddTemplate("_internal/shortcodes/sc1.html", `sc1`)
-			templ.AddTemplate("_internal/shortcodes/sc2.html", `sc2`)
-			templ.AddTemplate("_internal/shortcodes/inner.html", `{{with .Inner }}{{ . }}{{ end }}`)
-			templ.AddTemplate("_internal/shortcodes/inner2.html", `{{.Inner}}`)
-			templ.AddTemplate("_internal/shortcodes/inner3.html", `{{.Inner}}`)
-			return nil
-		})
+	// Make it more regexp friendly
+	strReplacer := strings.NewReplacer("[", "{", "]", "}")
 
-		if b, ok := this.expect.(bool); ok && !b {
-			if err == nil {
-				t.Fatalf("[%d] %s: ExtractShortcodes didn't return an expected error", i, this.name)
-			} else {
-				r := regexp.MustCompile(this.expectErrorMsg)
-				if !r.MatchString(err.Error()) {
-					t.Fatalf("[%d] %s: ExtractShortcodes didn't return an expected error message, got\n%s but expected\n%s",
-						i, this.name, err.Error(), this.expectErrorMsg)
-				}
-			}
-			continue
-		} else {
-			if err != nil {
-				t.Fatalf("[%d] %s: failed: %q", i, this.name, err)
-			}
+	str := func(s *shortcode) string {
+		if s == nil {
+			return "<nil>"
 		}
+		return strReplacer.Replace(fmt.Sprintf("%s;inline:%t;closing:%t;inner:%v;params:%v;ordinal:%d;markup:%t;version:%d;pos:%d",
+			s.name, s.isInline, s.isClosing, s.inner, s.params, s.ordinal, s.doMarkup, s.info.Config.Version, s.pos))
+	}
 
-		shortCodes := p.shortcodeState.shortcodes
-		contentReplaced := string(p.workContent)
-
-		var expected string
-		av := reflect.ValueOf(this.expect)
-		switch av.Kind() {
-		case reflect.String:
-			expected = av.String()
+	regexpCheck := func(re string) func(assert *require.Assertions, shortcode *shortcode, err error) {
+		return func(assert *require.Assertions, shortcode *shortcode, err error) {
+			assert.NoError(err)
+			got := str(shortcode)
+			assert.Regexp(regexp.MustCompile(re), got, got)
 		}
+	}
 
-		r, err := regexp.Compile(expected)
+	for _, test := range []struct {
+		name  string
+		input string
+		check func(assert *require.Assertions, shortcode *shortcode, err error)
+	}{
+		{"one shortcode, no markup", "{{< tag >}}", regexpCheck("tag.*closing:false.*markup:false")},
+		{"one shortcode, markup", "{{% tag %}}", regexpCheck("tag.*closing:false.*markup:true;version:2")},
+		{"one shortcode, markup, legacy", "{{% legacytag %}}", regexpCheck("tag.*closing:false.*markup:true;version:1")},
+		{"outer shortcode markup", "{{% inner %}}{{< tag >}}{{% /inner %}}", regexpCheck("inner.*closing:true.*markup:true")},
+		{"inner shortcode markup", "{{< inner >}}{{% tag %}}{{< /inner >}}", regexpCheck("inner.*closing:true.*;markup:false;version:2")},
+		{"one pos param", "{{% tag param1 %}}", regexpCheck("tag.*params:{param1}")},
+		{"two pos params", "{{< tag param1 param2>}}", regexpCheck("tag.*params:{param1 param2}")},
+		{"one named param", `{{% tag param1="value" %}}`, regexpCheck("tag.*params:map{param1:value}")},
+		{"two named params", `{{< tag param1="value1" param2="value2" >}}`, regexpCheck("tag.*params:map{param\\d:value\\d param\\d:value\\d}")},
+		{"inner", `{{< inner >}}Inner Content{{< / inner >}}`, regexpCheck("inner;inline:false;closing:true;inner:{Inner Content};")},
+		// issue #934
+		{"inner self-closing", `{{< inner />}}`, regexpCheck("inner;.*inner:{}")},
+		{"nested inner", `{{< inner >}}Inner Content->{{% inner2 param1 %}}inner2txt{{% /inner2 %}}Inner close->{{< / inner >}}`,
+			regexpCheck("inner;.*inner:{Inner Content->.*Inner close->}")},
+		{"nested, nested inner", `{{< inner >}}inner2->{{% inner2 param1 %}}inner2txt->inner3{{< inner3>}}inner3txt{{</ inner3 >}}{{% /inner2 %}}final close->{{< / inner >}}`,
+			regexpCheck("inner:{inner2-> inner2.*{{inner2txt->inner3.*final close->}")},
+		{"closed without content", `{{< inner param1 >}}{{< / inner >}}`, regexpCheck("inner.*inner:{}")},
+		{"inline", `{{< my.inline >}}Hi{{< /my.inline >}}`, regexpCheck("my.inline;inline:true;closing:true;inner:{Hi};")},
+	} {
 
-		if err != nil {
-			t.Fatalf("[%d] %s: Failed to compile regexp %q: %q", i, this.name, expected, err)
-		}
+		t.Run(test.name, func(t *testing.T) {
+			assert := require.New(t)
 
-		if strings.Count(contentReplaced, shortcodePlaceholderPrefix) != shortCodes.Len() {
-			t.Fatalf("[%d] %s: Not enough placeholders, found %d", i, this.name, shortCodes.Len())
-		}
+			counter := 0
+			placeholderFunc := func() string {
+				counter++
+				return fmt.Sprintf("HAHA%s-%dHBHB", shortcodePlaceholderPrefix, counter)
+			}
 
-		if !r.MatchString(contentReplaced) {
-			t.Fatalf("[%d] %s: Shortcode extract didn't match. got %q but expected %q", i, this.name, contentReplaced, expected)
-		}
+			p, err := pageparser.ParseMain(strings.NewReader(test.input), pageparser.Config{})
+			assert.NoError(err)
+			handler := newShortcodeHandler(nil, s, placeholderFunc)
+			iter := p.Iterator()
 
-		for _, placeHolder := range shortCodes.Keys() {
-			sc := shortCodes.getShortcode(placeHolder)
-			if !strings.Contains(contentReplaced, placeHolder.(string)) {
-				t.Fatalf("[%d] %s: Output does not contain placeholder %q", i, this.name, placeHolder)
-			}
+			short, err := handler.extractShortcode(0, 0, iter)
 
-			if sc.params == nil {
-				t.Fatalf("[%d] %s: Params is nil for shortcode '%s'", i, this.name, sc.name)
-			}
-		}
+			test.check(assert, short, err)
 
-		if this.expectShortCodes != "" {
-			shortCodesAsStr := fmt.Sprintf("map%q", collectAndSortShortcodes(shortCodes))
-			if !strings.Contains(shortCodesAsStr, this.expectShortCodes) {
-				t.Fatalf("[%d] %s: Shortcodes not as expected, got\n%s but expected\n%s", i, this.name, shortCodesAsStr, this.expectShortCodes)
-			}
-		}
+		})
 	}
+
 }
 
 func TestShortcodesInSite(t *testing.T) {
@@ -495,7 +446,7 @@ func TestShortcodesInSite(t *testing.T) {
 		contentPath string
 		content     string
 		outFile     string
-		expected    string
+		expected    interface{}
 	}{
 		{"sect/doc1.md", `a{{< b >}}c`,
 			filepath.FromSlash("public/sect/doc1/index.html"), "<p>abc</p>\n"},
@@ -542,7 +493,7 @@ e`,
 		// #2192 #2209: Shortcodes in markdown headers
 		{"sect/doc5.md", `# {{< b >}}	
 ## {{% c %}}`,
-			filepath.FromSlash("public/sect/doc5/index.html"), "\n\n<h1 id=\"hahahugoshortcode-1hbhb\">b</h1>\n\n<h2 id=\"hahahugoshortcode-2hbhb\">c</h2>\n"},
+			filepath.FromSlash("public/sect/doc5/index.html"), `-hbhb">b</h1>`},
 		// #2223 pygments
 		{"sect/doc6.md", "\n```bash\nb = {{< b >}} c = {{% c %}}\n```\n",
 			filepath.FromSlash("public/sect/doc6/index.html"),
@@ -591,7 +542,7 @@ tags:
 	}
 
 	addTemplates := func(templ tpl.TemplateHandler) error {
-		templ.AddTemplate("_default/single.html", "{{.Content}}")
+		templ.AddTemplate("_default/single.html", "{{.Content}} Word Count: {{ .WordCount }}")
 
 		templ.AddTemplate("_internal/shortcodes/b.html", `b`)
 		templ.AddTemplate("_internal/shortcodes/c.html", `c`)
@@ -616,21 +567,21 @@ tags:
 	writeSourcesToSource(t, "content", fs, sources...)
 
 	s := buildSingleSite(t, deps.DepsCfg{WithTemplate: addTemplates, Fs: fs, Cfg: cfg}, BuildCfg{})
-	th := testHelper{s.Cfg, s.Fs, t}
-
-	for _, test := range tests {
-		if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() {
-			fmt.Println("Skip Asciidoc test case as no Asciidoc present.")
-			continue
-		} else if strings.HasSuffix(test.contentPath, ".rst") && !helpers.HasRst() {
-			fmt.Println("Skip Rst test case as no rst2html present.")
-			continue
-		} else if strings.Contains(test.expected, "code") {
-			fmt.Println("Skip Pygments test case as no pygments present.")
-			continue
-		}
 
-		th.assertFileContent(test.outFile, test.expected)
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("test=%d;contentPath=%s", i, test.contentPath), func(t *testing.T) {
+			if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() {
+				t.Skip("Skip Asciidoc test case as no Asciidoc present.")
+			} else if strings.HasSuffix(test.contentPath, ".rst") && !helpers.HasRst() {
+				t.Skip("Skip Rst test case as no rst2html present.")
+			}
+
+			th := testHelper{s.Cfg, s.Fs, t}
+
+			expected := cast.ToStringSlice(test.expected)
+			th.assertFileContent(test.outFile, expected...)
+		})
+
 	}
 
 }
@@ -703,9 +654,9 @@ CSV: {{< myShort >}}
 	require.Len(t, h.Sites, 1)
 
 	s := h.Sites[0]
-	home := s.getPage(KindHome)
+	home := s.getPage(page.KindHome)
 	require.NotNil(t, home)
-	require.Len(t, home.outputFormats, 3)
+	require.Len(t, home.OutputFormats(), 3)
 
 	th.assertFileContent("public/index.html",
 		"Home HTML",
@@ -763,19 +714,6 @@ CSV: {{< myShort >}}
 
 }
 
-func collectAndSortShortcodes(shortcodes *orderedMap) []string {
-	var asArray []string
-
-	for _, key := range shortcodes.Keys() {
-		sc := shortcodes.getShortcode(key)
-		asArray = append(asArray, fmt.Sprintf("%s:%s", key, sc))
-	}
-
-	sort.Strings(asArray)
-	return asArray
-
-}
-
 func BenchmarkReplaceShortcodeTokens(b *testing.B) {
 
 	type input struct {
@@ -811,7 +749,7 @@ func BenchmarkReplaceShortcodeTokens(b *testing.B) {
 		for j := range data {
 			currIn := in[cnt]
 			cnt++
-			results, err := replaceShortcodeTokens(currIn.in, "HUGOSHORTCODE", currIn.replacements)
+			results, err := replaceShortcodeTokens(currIn.in, currIn.replacements)
 
 			if err != nil {
 				b.Fatalf("[%d] failed: %s", i, err)
@@ -834,36 +772,36 @@ func TestReplaceShortcodeTokens(t *testing.T) {
 		replacements map[string]string
 		expect       interface{}
 	}{
-		{"Hello HAHAPREFIX-1HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "Hello World."},
-		{"Hello HAHAPREFIX-1@}@.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, false},
-		{"HAHAPREFIX2-1HBHB", "PREFIX2", map[string]string{"HAHAPREFIX2-1HBHB": "World"}, "World"},
+		{"Hello HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello World."},
+		{"Hello HAHAHUGOSHORTCODE-1@}@.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, false},
+		{"HAHAHUGOSHORTCODE2-1HBHB", "PREFIX2", map[string]string{"HAHAHUGOSHORTCODE2-1HBHB": "World"}, "World"},
 		{"Hello World!", "PREFIX2", map[string]string{}, "Hello World!"},
-		{"!HAHAPREFIX-1HBHB", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "!World"},
-		{"HAHAPREFIX-1HBHB!", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "World!"},
-		{"!HAHAPREFIX-1HBHB!", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "!World!"},
-		{"_{_PREFIX-1HBHB", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "_{_PREFIX-1HBHB"},
-		{"Hello HAHAPREFIX-1HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "To You My Old Friend Who Told Me This Fantastic Story"}, "Hello To You My Old Friend Who Told Me This Fantastic Story."},
-		{"A HAHAA-1HBHB asdf HAHAA-2HBHB.", "A", map[string]string{"HAHAA-1HBHB": "v1", "HAHAA-2HBHB": "v2"}, "A v1 asdf v2."},
-		{"Hello HAHAPREFIX2-1HBHB. Go HAHAPREFIX2-2HBHB, Go, Go HAHAPREFIX2-3HBHB Go Go!.", "PREFIX2", map[string]string{"HAHAPREFIX2-1HBHB": "Europe", "HAHAPREFIX2-2HBHB": "Jonny", "HAHAPREFIX2-3HBHB": "Johnny"}, "Hello Europe. Go Jonny, Go, Go Johnny Go Go!."},
-		{"A HAHAPREFIX-2HBHB HAHAPREFIX-1HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B"}, "A B A."},
-		{"A HAHAPREFIX-1HBHB HAHAPREFIX-2", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A"}, false},
-		{"A HAHAPREFIX-1HBHB but not the second.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B"}, "A A but not the second."},
-		{"An HAHAPREFIX-1HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B"}, "An A."},
-		{"An HAHAPREFIX-1HBHB HAHAPREFIX-2HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B"}, "An A B."},
-		{"A HAHAPREFIX-1HBHB HAHAPREFIX-2HBHB HAHAPREFIX-3HBHB HAHAPREFIX-1HBHB HAHAPREFIX-3HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B", "HAHAPREFIX-3HBHB": "C"}, "A A B C A C."},
-		{"A HAHAPREFIX-1HBHB HAHAPREFIX-2HBHB HAHAPREFIX-3HBHB HAHAPREFIX-1HBHB HAHAPREFIX-3HBHB.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "A", "HAHAPREFIX-2HBHB": "B", "HAHAPREFIX-3HBHB": "C"}, "A A B C A C."},
+		{"!HAHAHUGOSHORTCODE-1HBHB", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "!World"},
+		{"HAHAHUGOSHORTCODE-1HBHB!", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "World!"},
+		{"!HAHAHUGOSHORTCODE-1HBHB!", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "!World!"},
+		{"_{_PREFIX-1HBHB", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "_{_PREFIX-1HBHB"},
+		{"Hello HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "To You My Old Friend Who Told Me This Fantastic Story"}, "Hello To You My Old Friend Who Told Me This Fantastic Story."},
+		{"A HAHAHUGOSHORTCODE-1HBHB asdf HAHAHUGOSHORTCODE-2HBHB.", "A", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "v1", "HAHAHUGOSHORTCODE-2HBHB": "v2"}, "A v1 asdf v2."},
+		{"Hello HAHAHUGOSHORTCODE2-1HBHB. Go HAHAHUGOSHORTCODE2-2HBHB, Go, Go HAHAHUGOSHORTCODE2-3HBHB Go Go!.", "PREFIX2", map[string]string{"HAHAHUGOSHORTCODE2-1HBHB": "Europe", "HAHAHUGOSHORTCODE2-2HBHB": "Jonny", "HAHAHUGOSHORTCODE2-3HBHB": "Johnny"}, "Hello Europe. Go Jonny, Go, Go Johnny Go Go!."},
+		{"A HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "A B A."},
+		{"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A"}, false},
+		{"A HAHAHUGOSHORTCODE-1HBHB but not the second.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "A A but not the second."},
+		{"An HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "An A."},
+		{"An HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "An A B."},
+		{"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-3HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-3HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B", "HAHAHUGOSHORTCODE-3HBHB": "C"}, "A A B C A C."},
+		{"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-3HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-3HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B", "HAHAHUGOSHORTCODE-3HBHB": "C"}, "A A B C A C."},
 		// Issue #1148 remove p-tags 10 =>
-		{"Hello <p>HAHAPREFIX-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "Hello World. END."},
-		{"Hello <p>HAHAPREFIX-1HBHB</p>. <p>HAHAPREFIX-2HBHB</p> END.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World", "HAHAPREFIX-2HBHB": "THE"}, "Hello World. THE END."},
-		{"Hello <p>HAHAPREFIX-1HBHB. END</p>.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "Hello <p>World. END</p>."},
-		{"<p>Hello HAHAPREFIX-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "<p>Hello World</p>. END."},
-		{"Hello <p>HAHAPREFIX-1HBHB12", "PREFIX", map[string]string{"HAHAPREFIX-1HBHB": "World"}, "Hello <p>World12"},
-		{"Hello HAHAP-1HBHB. HAHAP-1HBHB-HAHAP-1HBHB HAHAP-1HBHB HAHAP-1HBHB HAHAP-1HBHB END", "P", map[string]string{"HAHAP-1HBHB": strings.Repeat("BC", 100)},
+		{"Hello <p>HAHAHUGOSHORTCODE-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello World. END."},
+		{"Hello <p>HAHAHUGOSHORTCODE-1HBHB</p>. <p>HAHAHUGOSHORTCODE-2HBHB</p> END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World", "HAHAHUGOSHORTCODE-2HBHB": "THE"}, "Hello World. THE END."},
+		{"Hello <p>HAHAHUGOSHORTCODE-1HBHB. END</p>.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello <p>World. END</p>."},
+		{"<p>Hello HAHAHUGOSHORTCODE-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "<p>Hello World</p>. END."},
+		{"Hello <p>HAHAHUGOSHORTCODE-1HBHB12", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello <p>World12"},
+		{"Hello HAHAHUGOSHORTCODE-1HBHB. HAHAHUGOSHORTCODE-1HBHB-HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB END", "P", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": strings.Repeat("BC", 100)},
 			fmt.Sprintf("Hello %s. %s-%s %s %s %s END",
 				strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100))},
 	} {
 
-		results, err := replaceShortcodeTokens([]byte(this.input), this.prefix, this.replacements)
+		results, err := replaceShortcodeTokens([]byte(this.input), this.replacements)
 
 		if b, ok := this.expect.(bool); ok && !b {
 			if err == nil {
@@ -883,16 +821,6 @@ func TestReplaceShortcodeTokens(t *testing.T) {
 
 }
 
-func TestScKey(t *testing.T) {
-	require.Equal(t, scKey{Suffix: "xml", ShortcodePlaceholder: "ABCD"},
-		newScKey(media.XMLType, "ABCD"))
-	require.Equal(t, scKey{Lang: "en", Suffix: "html", OutputFormat: "AMP", ShortcodePlaceholder: "EFGH"},
-		newScKeyFromLangAndOutputFormat("en", output.AMPFormat, "EFGH"))
-	require.Equal(t, scKey{Suffix: "html", ShortcodePlaceholder: "IJKL"},
-		newDefaultScKey("IJKL"))
-
-}
-
 func TestShortcodeGetContent(t *testing.T) {
 	t.Parallel()
 	assert := require.New(t)
@@ -950,7 +878,7 @@ C-%s`
 
 	builder.WithViper(v).WithContent(content...).WithTemplates(templates...).CreateSites().Build(BuildCfg{})
 	s := builder.H.Sites[0]
-	assert.Equal(3, len(s.RegularPages))
+	assert.Equal(3, len(s.RegularPages()))
 
 	builder.AssertFileContent("public/section1/index.html",
 		"List Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
@@ -1017,7 +945,7 @@ weight: %d
 	builder.WithContent(content...).WithTemplatesAdded(shortcodes...).CreateSites().Build(BuildCfg{})
 
 	s := builder.H.Sites[0]
-	assert.Equal(3, len(s.RegularPages))
+	assert.Equal(3, len(s.RegularPages()))
 
 	builder.AssertFileContent("public/en/p1/index.html", `v1: 0 sgo: |v2: 1 sgo: 0|v3: 2 sgo: 1|v4: 3 sgo: 2|v5: 4 sgo: 3`)
 	builder.AssertFileContent("public/en/p1/index.html", `outer ordinal: 5 inner: 
@@ -1054,7 +982,7 @@ String: {{ . | safeHTML }}
 `).CreateSites().Build(BuildCfg{})
 
 	s := builder.H.Sites[0]
-	assert.Equal(1, len(s.RegularPages))
+	assert.Equal(1, len(s.RegularPages()))
 
 	builder.AssertFileContent("public/page/index.html",
 		filepath.FromSlash("File: content/page.md"),
diff --git a/hugolib/site.go b/hugolib/site.go
index 43b398b7059..a865914a130 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -22,59 +22,54 @@ import (
 	"mime"
 	"net/url"
 	"os"
+	"path"
 	"path/filepath"
 	"sort"
 	"strconv"
 	"strings"
 	"time"
 
+	"github.com/gohugoio/hugo/common/maps"
+
 	"github.com/pkg/errors"
 
 	"github.com/gohugoio/hugo/common/text"
 
-	"github.com/gohugoio/hugo/hugofs"
-
-	"github.com/gohugoio/hugo/common/herrors"
-
 	"github.com/gohugoio/hugo/common/hugo"
-	"github.com/gohugoio/hugo/common/maps"
 	"github.com/gohugoio/hugo/publisher"
 	_errors "github.com/pkg/errors"
 
 	"github.com/gohugoio/hugo/langs"
 
-	src "github.com/gohugoio/hugo/source"
-
-	"golang.org/x/sync/errgroup"
+	"github.com/gohugoio/hugo/resources/page"
 
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/lazy"
+	"golang.org/x/sync/errgroup"
 
 	"github.com/gohugoio/hugo/media"
-	"github.com/gohugoio/hugo/parser/metadecoders"
-
-	"github.com/markbates/inflect"
 
 	"github.com/fsnotify/fsnotify"
 	bp "github.com/gohugoio/hugo/bufferpool"
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/hugolib/pagemeta"
+	"github.com/gohugoio/hugo/navigation"
 	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/related"
 	"github.com/gohugoio/hugo/resources"
+	"github.com/gohugoio/hugo/resources/page/pagemeta"
+	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/gohugoio/hugo/source"
 	"github.com/gohugoio/hugo/tpl"
+
 	"github.com/spf13/afero"
 	"github.com/spf13/cast"
-	"github.com/spf13/nitro"
 	"github.com/spf13/viper"
 )
 
 // used to indicate if run as a test.
 var testMode bool
 
-var defaultTimer *nitro.B
-
 // Site contains all the information relevant for constructing a static
 // site.  The basic flow of information is as follows:
 //
@@ -93,34 +88,27 @@ var defaultTimer *nitro.B
 //
 // 5. The entire collection of files is written to disk.
 type Site struct {
-	owner *HugoSites
+
+	// The owning container. When multiple languages, there will be multiple
+	// sites.
+	h *HugoSites
 
 	*PageCollections
 
 	Taxonomies TaxonomyList
 
-	// Plural is what we get in the folder, so keep track of this mapping
-	// to get the singular form from that value.
-	taxonomiesPluralSingular map[string]string
-
-	// This is temporary, see https://github.com/gohugoio/hugo/issues/2835
-	// Maps 	"actors-gerard-depardieu" to "Gérard Depardieu" when preserveTaxonomyNames
-	// is set.
-	taxonomiesOrigKey map[string]string
+	taxonomyNodes taxonomyNodeInfos
 
 	Sections Taxonomy
 	Info     SiteInfo
-	Menus    Menus
-	timer    *nitro.B
 
 	layoutHandler *output.LayoutHandler
 
-	draftCount   int
-	futureCount  int
-	expiredCount int
+	buildStats *buildStats
 
-	Data     map[string]interface{}
-	Language *langs.Language
+	language *langs.Language
+
+	siteCfg siteConfigHolder
 
 	disabledKinds map[string]bool
 
@@ -137,7 +125,7 @@ type Site struct {
 	outputFormatsConfig output.Formats
 	mediaTypesConfig    media.Types
 
-	siteConfig SiteConfig
+	siteConfigConfig SiteConfig
 
 	// How to handle page front matter.
 	frontmatterHandler pagemeta.FrontMatterHandler
@@ -158,23 +146,163 @@ type Site struct {
 	// The func used to title case titles.
 	titleFunc func(s string) string
 
-	relatedDocsHandler *relatedDocsHandler
+	relatedDocsHandler *page.RelatedDocsHandler
 	siteRefLinker
-	// Set in some tests
-	shortcodePlaceholderFunc func() string
 
 	publisher publisher.Publisher
+
+	menus navigation.Menus
+
+	// Shortcut to the home page. Note that this may be nil if
+	// home page, for some odd reason, is disabled.
+	home *pageState
+
+	// The last modification date of this site.
+	lastmod time.Time
+
+	// Lazily loaded site dependencies
+	init *siteInit
+}
+
+type siteConfigHolder struct {
+	sitemap          config.Sitemap
+	taxonomiesConfig map[string]string
+	timeout          time.Duration
+	hasCJKLanguage   bool
+	enableEmoji      bool
+}
+
+// Lazily loaded site dependencies.
+type siteInit struct {
+	prevNext          *lazy.Init
+	prevNextInSection *lazy.Init
+	menus             *lazy.Init
+}
+
+func (init *siteInit) Reset() {
+	init.prevNext.Reset()
+	init.prevNextInSection.Reset()
+	init.menus.Reset()
+}
+
+func (s *Site) initInit(init *lazy.Init, pctx pageContext) {
+	_, err := init.Do()
+	if err != nil {
+		s.h.FatalError(pctx.wrapError(err))
+	}
+}
+
+func (s *Site) prepareInits() {
+	s.init = &siteInit{}
+
+	var init lazy.Init
+
+	s.init.prevNext = init.Branch(func() (interface{}, error) {
+		regularPages := s.findWorkPagesByKind(page.KindPage)
+		for i, p := range regularPages {
+			if p.posNextPrev == nil {
+				continue
+			}
+			p.posNextPrev.nextPage = nil
+			p.posNextPrev.prevPage = nil
+
+			if i > 0 {
+				p.posNextPrev.nextPage = regularPages[i-1]
+			}
+
+			if i < len(regularPages)-1 {
+				p.posNextPrev.prevPage = regularPages[i+1]
+			}
+		}
+		return nil, nil
+	})
+
+	s.init.prevNextInSection = init.Branch(func() (interface{}, error) {
+		var rootSection []int
+		for i, p1 := range s.workAllPages {
+			if p1.IsPage() && p1.Section() == "" {
+				rootSection = append(rootSection, i)
+			}
+			if p1.IsSection() && len(p1.SectionsEntries()) <= 1 {
+				sectionPages := p1.Pages()
+				for i, p2 := range sectionPages {
+					p2s := p2.(*pageState)
+					if p2s.posNextPrevSection == nil {
+						continue
+					}
+
+					p2s.posNextPrevSection.nextPage = nil
+					p2s.posNextPrevSection.prevPage = nil
+
+					if i > 0 {
+						p2s.posNextPrevSection.nextPage = sectionPages[i-1]
+					}
+
+					if i < len(sectionPages)-1 {
+						p2s.posNextPrevSection.prevPage = sectionPages[i+1]
+					}
+				}
+			}
+		}
+
+		for i, j := range rootSection {
+			p := s.workAllPages[j]
+			if i > 0 {
+				p.posNextPrevSection.nextPage = s.workAllPages[rootSection[i-1]]
+			}
+
+			if i < len(rootSection)-1 {
+				p.posNextPrevSection.prevPage = s.workAllPages[rootSection[i+1]]
+			}
+		}
+
+		return nil, nil
+	})
+
+	s.init.menus = init.Branch(func() (interface{}, error) {
+		s.assembleMenus()
+		return nil, nil
+	})
+
+}
+
+// Build stats for a given site.
+type buildStats struct {
+	draftCount   int
+	futureCount  int
+	expiredCount int
+}
+
+// TODO(bep) consolidate all site stats into this
+// TODO(bep) page check page stats (seems off)
+func (b *buildStats) update(p page.Page) {
+	if p.Draft() {
+		b.draftCount++
+	}
+
+	if resource.IsFuture(p) {
+		b.futureCount++
+	}
+
+	if resource.IsExpired(p) {
+		b.expiredCount++
+	}
 }
 
 type siteRenderingContext struct {
 	output.Format
 }
 
+func (s *Site) Menus() navigation.Menus {
+	s.init.menus.Do()
+	return s.menus
+}
+
 func (s *Site) initRenderFormats() {
 	formatSet := make(map[string]bool)
 	formats := output.Formats{}
-	for _, p := range s.Pages {
-		for _, f := range p.outputFormats {
+	for _, p := range s.workAllPages {
+		for _, f := range p.m.configuredOutputFormats {
 			if !formatSet[f.Name] {
 				formats = append(formats, f)
 				formatSet[f.Name] = true
@@ -182,10 +310,30 @@ func (s *Site) initRenderFormats() {
 		}
 	}
 
+	// Add the per kind configured output formats
+	for _, kind := range allKindsInPages {
+		if siteFormats, found := s.outputFormats[kind]; found {
+			for _, f := range siteFormats {
+				if !formatSet[f.Name] {
+					formats = append(formats, f)
+					formatSet[f.Name] = true
+				}
+			}
+		}
+	}
+
 	sort.Sort(formats)
 	s.renderFormats = formats
 }
 
+func (s *Site) GetRelatedDocsHandler() *page.RelatedDocsHandler {
+	return s.relatedDocsHandler
+}
+
+func (s *Site) Language() *langs.Language {
+	return s.language
+}
+
 func (s *Site) isEnabled(kind string) bool {
 	if kind == kindUnknown {
 		panic("Unknown kind")
@@ -199,19 +347,23 @@ func (s *Site) reset() *Site {
 		layoutHandler:          output.NewLayoutHandler(),
 		disabledKinds:          s.disabledKinds,
 		titleFunc:              s.titleFunc,
-		relatedDocsHandler:     newSearchIndexHandler(s.relatedDocsHandler.cfg),
+		relatedDocsHandler:     s.relatedDocsHandler.Clone(),
 		siteRefLinker:          s.siteRefLinker,
 		outputFormats:          s.outputFormats,
 		rc:                     s.rc,
 		outputFormatsConfig:    s.outputFormatsConfig,
 		frontmatterHandler:     s.frontmatterHandler,
 		mediaTypesConfig:       s.mediaTypesConfig,
-		Language:               s.Language,
-		owner:                  s.owner,
+		language:               s.language,
+		h:                      s.h,
 		publisher:              s.publisher,
-		siteConfig:             s.siteConfig,
+		siteConfigConfig:       s.siteConfigConfig,
 		enableInlineShortcodes: s.enableInlineShortcodes,
-		PageCollections:        newPageCollections()}
+		buildStats:             &buildStats{},
+		init:                   s.init,
+		PageCollections:        newPageCollections(),
+		siteCfg:                s.siteCfg,
+	}
 
 }
 
@@ -262,6 +414,8 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
 		return nil, err
 	}
 
+	taxonomies := cfg.Language.GetStringMapString("taxonomies")
+
 	var relatedContentConfig related.Config
 
 	if cfg.Language.IsSet("related") {
@@ -271,7 +425,6 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
 		}
 	} else {
 		relatedContentConfig = related.DefaultConfig
-		taxonomies := cfg.Language.GetStringMapString("taxonomies")
 		if _, found := taxonomies["tag"]; found {
 			relatedContentConfig.Add(related.IndexConfig{Name: "tags", Weight: 80})
 		}
@@ -284,21 +437,33 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
 		return nil, err
 	}
 
+	siteConfig := siteConfigHolder{
+		sitemap:          config.DecodeSitemap(config.Sitemap{Priority: -1, Filename: "sitemap.xml"}, cfg.Language.GetStringMap("sitemap")),
+		taxonomiesConfig: taxonomies,
+		timeout:          time.Duration(cfg.Language.GetInt("timeout")) * time.Millisecond,
+		hasCJKLanguage:   cfg.Language.GetBool("hasCJKLanguage"),
+		enableEmoji:      cfg.Language.Cfg.GetBool("enableEmoji"),
+	}
+
 	s := &Site{
 		PageCollections:        c,
 		layoutHandler:          output.NewLayoutHandler(),
-		Language:               cfg.Language,
+		language:               cfg.Language,
 		disabledKinds:          disabledKinds,
 		titleFunc:              titleFunc,
-		relatedDocsHandler:     newSearchIndexHandler(relatedContentConfig),
+		relatedDocsHandler:     page.NewRelatedDocsHandler(relatedContentConfig),
 		outputFormats:          outputFormats,
 		rc:                     &siteRenderingContext{output.HTMLFormat},
 		outputFormatsConfig:    siteOutputFormatsConfig,
 		mediaTypesConfig:       siteMediaTypesConfig,
 		frontmatterHandler:     frontMatterHandler,
+		buildStats:             &buildStats{},
 		enableInlineShortcodes: cfg.Language.GetBool("enableInlineShortcodes"),
+		siteCfg:                siteConfig,
 	}
 
+	s.prepareInits()
+
 	return s, nil
 
 }
@@ -372,52 +537,94 @@ func NewSiteForCfg(cfg deps.DepsCfg) (*Site, error) {
 
 }
 
-type SiteInfos []*SiteInfo
+type SiteInfo struct {
+	Authors page.AuthorList
+	Social  SiteSocial
 
-// First is a convenience method to get the first Site, i.e. the main language.
-func (s SiteInfos) First() *SiteInfo {
-	if len(s) == 0 {
-		return nil
-	}
-	return s[0]
-}
+	hugoInfo     hugo.Info
+	title        string
+	RSSLink      string
+	Author       map[string]interface{}
+	LanguageCode string
+	Copyright    string
+
+	permalinks map[string]string
+
+	LanguagePrefix string
+	Languages      langs.Languages
+
+	BuildDrafts bool
+
+	canonifyURLs bool
+	relativeURLs bool
+	uglyURLs     func(p page.Page) bool
 
-type SiteInfo struct {
-	Taxonomies TaxonomyList
-	Authors    AuthorList
-	Social     SiteSocial
-	*PageCollections
-	Menus                          *Menus
-	hugoInfo                       hugo.Info
-	Title                          string
-	RSSLink                        string
-	Author                         map[string]interface{}
-	LanguageCode                   string
-	Copyright                      string
-	LastChange                     time.Time
-	Permalinks                     PermalinkOverrides
-	Params                         map[string]interface{}
-	BuildDrafts                    bool
-	canonifyURLs                   bool
-	relativeURLs                   bool
-	uglyURLs                       func(p *Page) bool
-	preserveTaxonomyNames          bool
-	Data                           *map[string]interface{}
 	owner                          *HugoSites
 	s                              *Site
 	language                       *langs.Language
-	LanguagePrefix                 string
-	Languages                      langs.Languages
 	defaultContentLanguageInSubdir bool
 	sectionPagesMenu               string
 }
 
+func (s *SiteInfo) Pages() page.Pages {
+	return s.s.Pages()
+
+}
+
+func (s *SiteInfo) RegularPages() page.Pages {
+	return s.s.RegularPages()
+
+}
+
+func (s *SiteInfo) AllPages() page.Pages {
+	return s.s.AllPages()
+}
+
+func (s *SiteInfo) AllRegularPages() page.Pages {
+	return s.s.AllRegularPages()
+}
+
+func (s *SiteInfo) Permalinks() map[string]string {
+	// Remove in 0.57
+	helpers.Deprecated("Site", ".Permalinks", "", false)
+	return s.permalinks
+}
+
+func (s *SiteInfo) LastChange() time.Time {
+	return s.s.lastmod
+}
+
+func (s *SiteInfo) Title() string {
+	return s.title
+}
+
+func (s *SiteInfo) Site() page.Site {
+	return s
+}
+
+func (s *SiteInfo) Menus() navigation.Menus {
+	return s.s.Menus()
+}
+
+// TODO(bep) type
+func (s *SiteInfo) Taxonomies() interface{} {
+	return s.s.Taxonomies
+}
+
+func (s *SiteInfo) Params() map[string]interface{} {
+	return s.s.Language().Params()
+}
+
+func (s *SiteInfo) Data() map[string]interface{} {
+	return s.s.h.Data()
+}
+
 func (s *SiteInfo) Language() *langs.Language {
 	return s.language
 }
 
 func (s *SiteInfo) Config() SiteConfig {
-	return s.s.siteConfig
+	return s.s.siteConfigConfig
 }
 
 func (s *SiteInfo) Hugo() hugo.Info {
@@ -425,11 +632,12 @@ func (s *SiteInfo) Hugo() hugo.Info {
 }
 
 // Sites is a convenience method to get all the Hugo sites/languages configured.
-func (s *SiteInfo) Sites() SiteInfos {
-	return s.s.owner.siteInfos()
+func (s *SiteInfo) Sites() page.Sites {
+	return s.s.h.siteInfos()
 }
+
 func (s *SiteInfo) String() string {
-	return fmt.Sprintf("Site(%q)", s.Title)
+	return fmt.Sprintf("Site(%q)", s.title)
 }
 
 func (s *SiteInfo) BaseURL() template.URL {
@@ -484,7 +692,7 @@ func (s *SiteInfo) Param(key interface{}) (interface{}, error) {
 		return nil, err
 	}
 	keyStr = strings.ToLower(keyStr)
-	return s.Params[keyStr], nil
+	return s.Params()[keyStr], nil
 }
 
 func (s *SiteInfo) IsMultiLingual() bool {
@@ -513,28 +721,24 @@ func newSiteRefLinker(cfg config.Provider, s *Site) (siteRefLinker, error) {
 	return siteRefLinker{s: s, errorLogger: logger, notFoundURL: notFoundURL}, nil
 }
 
-func (s siteRefLinker) logNotFound(ref, what string, p *Page, position text.Position) {
+func (s siteRefLinker) logNotFound(ref, what string, p page.Page, position text.Position) {
 	if position.IsValid() {
 		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q: %s: %s", s.s.Lang(), ref, position.String(), what)
 	} else if p == nil {
 		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what)
 	} else {
-		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.pathOrTitle(), what)
+		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Path(), what)
 	}
 }
 
 func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, outputFormat string) (string, error) {
 
-	var page *Page
-	switch v := source.(type) {
-	case *Page:
-		page = v
-	case pageContainer:
-		page = v.page()
+	p, err := unwrapPage(source)
+	if err != nil {
+		return "", err
 	}
 
 	var refURL *url.URL
-	var err error
 
 	ref = filepath.ToSlash(ref)
 
@@ -544,11 +748,11 @@ func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, o
 		return s.notFoundURL, err
 	}
 
-	var target *Page
+	var target page.Page
 	var link string
 
 	if refURL.Path != "" {
-		target, err := s.s.getPageNew(page, refURL.Path)
+		target, err := s.s.getPageNew(p, refURL.Path)
 		var pos text.Position
 		if err != nil || target == nil {
 			if p, ok := source.(text.Positioner); ok {
@@ -558,12 +762,12 @@ func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, o
 		}
 
 		if err != nil {
-			s.logNotFound(refURL.Path, err.Error(), page, pos)
+			s.logNotFound(refURL.Path, err.Error(), p, pos)
 			return s.notFoundURL, nil
 		}
 
 		if target == nil {
-			s.logNotFound(refURL.Path, "page not found", page, pos)
+			s.logNotFound(refURL.Path, "page not found", p, pos)
 			return s.notFoundURL, nil
 		}
 
@@ -573,7 +777,7 @@ func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, o
 			o := target.OutputFormats().Get(outputFormat)
 
 			if o == nil {
-				s.logNotFound(refURL.Path, fmt.Sprintf("output format %q", outputFormat), page, pos)
+				s.logNotFound(refURL.Path, fmt.Sprintf("output format %q", outputFormat), p, pos)
 				return s.notFoundURL, nil
 			}
 			permalinker = o
@@ -587,22 +791,24 @@ func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, o
 	}
 
 	if refURL.Fragment != "" {
+		_ = target
 		link = link + "#" + refURL.Fragment
-
-		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
-			link = link + ":" + target.UniqueID()
-		} else if page != nil && !page.getRenderingConfig().PlainIDAnchors {
-			link = link + ":" + page.UniqueID()
+		if pctx, ok := target.(pageContext); ok && target.File() != nil && !pctx.getRenderingConfig().PlainIDAnchors {
+			if refURL.Path != "" {
+				link = link + ":" + target.File().UniqueID()
+			}
+		} else if pctx, ok := p.(pageContext); ok && p.File() != nil && !pctx.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + p.File().UniqueID()
 		}
-	}
 
+	}
 	return link, nil
 }
 
 // Ref will give an absolute URL to ref in the given Page.
-func (s *SiteInfo) Ref(ref string, page *Page, options ...string) (string, error) {
-	// Remove in Hugo 0.53
-	helpers.Deprecated("Site", ".Ref", "Use .Site.GetPage", false)
+func (s *SiteInfo) Ref(ref string, page page.Page, options ...string) (string, error) {
+	// Remove in Hugo 0.54
+	helpers.Deprecated("Site", ".Ref", "Use .Site.GetPage", true)
 	outputFormat := ""
 	if len(options) > 0 {
 		outputFormat = options[0]
@@ -612,9 +818,9 @@ func (s *SiteInfo) Ref(ref string, page *Page, options ...string) (string, error
 }
 
 // RelRef will give an relative URL to ref in the given Page.
-func (s *SiteInfo) RelRef(ref string, page *Page, options ...string) (string, error) {
-	// Remove in Hugo 0.53
-	helpers.Deprecated("Site", ".RelRef", "Use .Site.GetPage", false)
+func (s *SiteInfo) RelRef(ref string, page page.Page, options ...string) (string, error) {
+	// Remove in Hugo 0.54
+	helpers.Deprecated("Site", ".RelRef", "Use .Site.GetPage", true)
 	outputFormat := ""
 	if len(options) > 0 {
 		outputFormat = options[0]
@@ -624,22 +830,11 @@ func (s *SiteInfo) RelRef(ref string, page *Page, options ...string) (string, er
 }
 
 func (s *Site) running() bool {
-	return s.owner != nil && s.owner.running
+	return s.h != nil && s.h.running
 }
 
 func (s *Site) multilingual() *Multilingual {
-	return s.owner.multilingual
-}
-
-func init() {
-	defaultTimer = nitro.Initalize()
-}
-
-func (s *Site) timerStep(step string) {
-	if s.timer == nil {
-		s.timer = defaultTimer
-	}
-	s.timer.Step(step)
+	return s.h.multilingual
 }
 
 type whatChanged struct {
@@ -689,6 +884,8 @@ func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
 	return filtered
 }
 
+// TODO(bep) page check Page's .Hugo is deprecated and will be removed in a future release. Use the global hugo function.
+
 func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
 	var filtered []fsnotify.Event
 
@@ -737,9 +934,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 
 	s.Log.DEBUG.Printf("Rebuild for events %q", events)
 
-	h := s.owner
-
-	s.timerStep("initialize rebuild")
+	h := s.h
 
 	// First we need to determine what changed
 
@@ -771,7 +966,6 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 			tmplChanged = append(tmplChanged, ev)
 
 			if strings.Contains(ev.Name, "shortcodes") {
-				clearIsInnerShortcodeCache()
 				shortcode := filepath.Base(ev.Name)
 				shortcode = strings.TrimSuffix(shortcode, filepath.Ext(shortcode))
 				shortcodesChanged[shortcode] = true
@@ -788,14 +982,16 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 	}
 
 	// These in memory resource caches will be rebuilt on demand.
-	for _, s := range s.owner.Sites {
+	for _, s := range s.h.Sites {
 		s.ResourceSpec.ResourceCache.DeletePartitions(cachePartitions...)
 	}
 
 	if len(tmplChanged) > 0 || len(i18nChanged) > 0 {
-		sites := s.owner.Sites
+		sites := s.h.Sites
 		first := sites[0]
 
+		s.h.init.Reset()
+
 		// TOD(bep) globals clean
 		if err := first.Deps.LoadResources(); err != nil {
 			return whatChanged{}, err
@@ -805,7 +1001,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 			site := sites[i]
 			var err error
 			depsCfg := deps.DepsCfg{
-				Language:      site.Language,
+				Language:      site.language,
 				MediaTypes:    site.mediaTypesConfig,
 				OutputFormats: site.outputFormatsConfig,
 			}
@@ -817,14 +1013,10 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 				return whatChanged{}, err
 			}
 		}
-
-		s.timerStep("template prep")
 	}
 
 	if len(dataChanged) > 0 {
-		if err := s.readDataFromSourceFS(); err != nil {
-			return whatChanged{}, err
-		}
+		s.h.init.data.Reset()
 	}
 
 	for _, ev := range sourceChanged {
@@ -860,7 +1052,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 		// pages that keeps a reference to the changed shortcode.
 		pagesWithShortcode := h.findPagesByShortcode(shortcode)
 		for _, p := range pagesWithShortcode {
-			contentFilesChanged = append(contentFilesChanged, p.File.Filename())
+			contentFilesChanged = append(contentFilesChanged, p.File().Filename())
 		}
 	}
 
@@ -891,193 +1083,72 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 
 }
 
-func (s *Site) loadData(fs afero.Fs) (err error) {
-	spec := src.NewSourceSpec(s.PathSpec, fs)
-	fileSystem := spec.NewFilesystem("")
-	s.Data = make(map[string]interface{})
-	for _, r := range fileSystem.Files() {
-		if err := s.handleDataFile(r); err != nil {
-			return err
-		}
+func (s *Site) process(config BuildCfg) (err error) {
+	if err = s.initialize(); err != nil {
+		return
 	}
-
-	return
-}
-
-func (s *Site) errWithFileContext(err error, f source.File) error {
-	rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
-	if !ok {
+	if err := s.readAndProcessContent(); err != nil {
 		return err
 	}
-
-	realFilename := rfi.RealFilename()
-
-	err, _ = herrors.WithFileContextForFile(
-		err,
-		realFilename,
-		realFilename,
-		s.SourceSpec.Fs.Source,
-		herrors.SimpleLineMatcher)
-
 	return err
-}
-
-func (s *Site) handleDataFile(r source.ReadableFile) error {
-	var current map[string]interface{}
-
-	f, err := r.Open()
-	if err != nil {
-		return _errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
-	}
-	defer f.Close()
-
-	// Crawl in data tree to insert data
-	current = s.Data
-	keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
-	// The first path element is the virtual folder (typically theme name), which is
-	// not part of the key.
-	if len(keyParts) > 1 {
-		for _, key := range keyParts[1:] {
-			if key != "" {
-				if _, ok := current[key]; !ok {
-					current[key] = make(map[string]interface{})
-				}
-				current = current[key].(map[string]interface{})
-			}
-		}
-	}
 
-	data, err := s.readData(r)
-	if err != nil {
-		return s.errWithFileContext(err, r)
-	}
-
-	if data == nil {
-		return nil
-	}
-
-	// filepath.Walk walks the files in lexical order, '/' comes before '.'
-	// this warning could happen if
-	// 1. A theme uses the same key; the main data folder wins
-	// 2. A sub folder uses the same key: the sub folder wins
-	higherPrecedentData := current[r.BaseFileName()]
-
-	switch data.(type) {
-	case nil:
-		// hear the crickets?
-
-	case map[string]interface{}:
-
-		switch higherPrecedentData.(type) {
-		case nil:
-			current[r.BaseFileName()] = data
-		case map[string]interface{}:
-			// merge maps: insert entries from data for keys that
-			// don't already exist in higherPrecedentData
-			higherPrecedentMap := higherPrecedentData.(map[string]interface{})
-			for key, value := range data.(map[string]interface{}) {
-				if _, exists := higherPrecedentMap[key]; exists {
-					s.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
-				} else {
-					higherPrecedentMap[key] = value
-				}
-			}
-		default:
-			// can't merge: higherPrecedentData is not a map
-			s.Log.WARN.Printf("The %T data from '%s' overridden by "+
-				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
-		}
-
-	case []interface{}:
-		if higherPrecedentData == nil {
-			current[r.BaseFileName()] = data
-		} else {
-			// we don't merge array data
-			s.Log.WARN.Printf("The %T data from '%s' overridden by "+
-				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
-		}
-
-	default:
-		s.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName())
-	}
-
-	return nil
 }
 
-func (s *Site) readData(f source.ReadableFile) (interface{}, error) {
-	file, err := f.Open()
-	if err != nil {
-		return nil, _errors.Wrap(err, "readData: failed to open data file")
+func (s *Site) setupSitePages() {
+	var homeDates *resource.Dates
+	if s.home != nil {
+		// If the home page has no dates set, we fall back to the site dates.
+		homeDates = &s.home.m.Dates
 	}
-	defer file.Close()
-	content := helpers.ReaderToBytes(file)
-
-	format := metadecoders.FormatFromString(f.Extension())
-	return metadecoders.Default.Unmarshal(content, format)
-}
-
-func (s *Site) readDataFromSourceFS() error {
-	err := s.loadData(s.PathSpec.BaseFs.Data.Fs)
-	s.timerStep("load data")
-	return err
-}
 
-func (s *Site) process(config BuildCfg) (err error) {
-	if err = s.initialize(); err != nil {
+	if !s.lastmod.IsZero() && (homeDates == nil || !resource.IsZeroDates(homeDates)) {
 		return
 	}
-	s.timerStep("initialize")
 
-	if err = s.readDataFromSourceFS(); err != nil {
+	if homeDates != nil && !s.lastmod.IsZero() {
+		homeDates.FDate = s.lastmod
+		homeDates.FLastmod = s.lastmod
 		return
-	}
-
-	s.timerStep("load i18n")
 
-	if err := s.readAndProcessContent(); err != nil {
-		return err
 	}
-	s.timerStep("read and convert pages from source")
 
-	return err
-
-}
+	var siteLastmod time.Time
+	var siteLastDate time.Time
 
-func (s *Site) setupSitePages() {
-	var siteLastChange time.Time
-
-	for i, page := range s.RegularPages {
-		if i > 0 {
-			page.NextPage = s.RegularPages[i-1]
-		}
-
-		if i < len(s.RegularPages)-1 {
-			page.PrevPage = s.RegularPages[i+1]
+	for _, page := range s.workAllPages {
+		if !page.IsPage() {
+			continue
 		}
-
 		// Determine Site.Info.LastChange
 		// Note that the logic to determine which date to use for Lastmod
 		// is already applied, so this is *the* date to use.
 		// We cannot just pick the last page in the default sort, because
 		// that may not be ordered by date.
-		if page.Lastmod.After(siteLastChange) {
-			siteLastChange = page.Lastmod
+		// TODO(bep) check if this can be done earlier
+		if page.Lastmod().After(siteLastmod) {
+			siteLastmod = page.Lastmod()
+		}
+		if page.Date().After(siteLastDate) {
+			siteLastDate = page.Date()
 		}
 	}
 
-	s.Info.LastChange = siteLastChange
+	s.lastmod = siteLastmod
+
+	if homeDates != nil && resource.IsZeroDates(homeDates) {
+		homeDates.FDate = siteLastDate
+		homeDates.FLastmod = s.lastmod
+	}
+
 }
 
-func (s *Site) render(config *BuildCfg, outFormatIdx int) (err error) {
-	// Clear the global page cache.
-	spc.clear()
+func (s *Site) render(ctx *siteRenderContext) (err error) {
 
-	if outFormatIdx == 0 {
-		if err = s.preparePages(); err != nil {
-			return
-		}
-		s.timerStep("prepare pages")
+	if err := page.Clear(); err != nil {
+		return err
+	}
 
+	if ctx.outIdx == 0 {
 		// Note that even if disableAliases is set, the aliases themselves are
 		// preserved on page. The motivation with this is to be able to generate
 		// 301 redirects in a .htacess file and similar using a custom output format.
@@ -1089,36 +1160,35 @@ func (s *Site) render(config *BuildCfg, outFormatIdx int) (err error) {
 			if err = s.renderAliases(); err != nil {
 				return
 			}
-			s.timerStep("render and write aliases")
 		}
 
 	}
 
-	if err = s.renderPages(config); err != nil {
+	if err = s.renderPages(ctx); err != nil {
 		return
 	}
 
-	s.timerStep("render and write pages")
+	if ctx.outIdx == 0 {
+		if err = s.renderSitemap(); err != nil {
+			return
+		}
 
-	// TODO(bep) render consider this, ref. render404 etc.
-	if outFormatIdx > 0 {
-		return
-	}
+		if err = s.renderRobotsTXT(); err != nil {
+			return
+		}
 
-	if err = s.renderSitemap(); err != nil {
-		return
+		if err = s.render404(); err != nil {
+			return
+		}
 	}
-	s.timerStep("render and write Sitemap")
 
-	if err = s.renderRobotsTXT(); err != nil {
+	if !ctx.renderSingletonPages() {
 		return
 	}
-	s.timerStep("render and write robots.txt")
 
-	if err = s.render404(); err != nil {
+	if err = s.renderMainLanguageRedirect(); err != nil {
 		return
 	}
-	s.timerStep("render and write 404")
 
 	return
 }
@@ -1128,8 +1198,6 @@ func (s *Site) Initialise() (err error) {
 }
 
 func (s *Site) initialize() (err error) {
-	s.Menus = Menus{}
-
 	return s.initializeSiteInfo()
 }
 
@@ -1144,31 +1212,25 @@ func (s *SiteInfo) HomeAbsURL() string {
 
 // SitemapAbsURL is a convenience method giving the absolute URL to the sitemap.
 func (s *SiteInfo) SitemapAbsURL() string {
-	sitemapDefault := parseSitemap(s.s.Cfg.GetStringMap("sitemap"))
 	p := s.HomeAbsURL()
 	if !strings.HasSuffix(p, "/") {
 		p += "/"
 	}
-	p += sitemapDefault.Filename
+	p += s.s.siteCfg.sitemap.Filename
 	return p
 }
 
 func (s *Site) initializeSiteInfo() error {
 	var (
-		lang      = s.Language
+		lang      = s.language
 		languages langs.Languages
 	)
 
-	if s.owner != nil && s.owner.multilingual != nil {
-		languages = s.owner.multilingual.Languages
+	if s.h != nil && s.h.multilingual != nil {
+		languages = s.h.multilingual.Languages
 	}
 
-	params := lang.Params()
-
-	permalinks := make(PermalinkOverrides)
-	for k, v := range s.Cfg.GetStringMapString("permalinks") {
-		permalinks[k] = pathPattern(v)
-	}
+	permalinks := s.Cfg.GetStringMapString("permalinks")
 
 	defaultContentInSubDir := s.Cfg.GetBool("defaultContentLanguageInSubdir")
 	defaultContentLanguage := s.Cfg.GetString("defaultContentLanguage")
@@ -1178,7 +1240,7 @@ func (s *Site) initializeSiteInfo() error {
 		languagePrefix = "/" + lang.Lang
 	}
 
-	var uglyURLs = func(p *Page) bool {
+	var uglyURLs = func(p page.Page) bool {
 		return false
 	}
 
@@ -1186,25 +1248,25 @@ func (s *Site) initializeSiteInfo() error {
 	if v != nil {
 		switch vv := v.(type) {
 		case bool:
-			uglyURLs = func(p *Page) bool {
+			uglyURLs = func(p page.Page) bool {
 				return vv
 			}
 		case string:
 			// Is what be get from CLI (--uglyURLs)
 			vvv := cast.ToBool(vv)
-			uglyURLs = func(p *Page) bool {
+			uglyURLs = func(p page.Page) bool {
 				return vvv
 			}
 		default:
 			m := cast.ToStringMapBool(v)
-			uglyURLs = func(p *Page) bool {
+			uglyURLs = func(p page.Page) bool {
 				return m[p.Section()]
 			}
 		}
 	}
 
 	s.Info = SiteInfo{
-		Title:                          lang.GetString("title"),
+		title:                          lang.GetString("title"),
 		Author:                         lang.GetStringMap("author"),
 		Social:                         lang.GetStringMapString("social"),
 		LanguageCode:                   lang.GetString("languageCode"),
@@ -1218,20 +1280,13 @@ func (s *Site) initializeSiteInfo() error {
 		canonifyURLs:                   s.Cfg.GetBool("canonifyURLs"),
 		relativeURLs:                   s.Cfg.GetBool("relativeURLs"),
 		uglyURLs:                       uglyURLs,
-		preserveTaxonomyNames:          lang.GetBool("preserveTaxonomyNames"),
-		PageCollections:                s.PageCollections,
-		Menus:                          &s.Menus,
-		Params:                         params,
-		Permalinks:                     permalinks,
-		Data:                           &s.Data,
-		owner:                          s.owner,
+		permalinks:                     permalinks,
+		owner:                          s.h,
 		s:                              s,
 		hugoInfo:                       hugo.NewInfo(s.Cfg.GetString("environment")),
-		// TODO(bep) make this Menu and similar into delegate methods on SiteInfo
-		Taxonomies: s.Taxonomies,
 	}
 
-	rssOutputFormat, found := s.outputFormats[KindHome].GetByName(output.RSSFormat.Name)
+	rssOutputFormat, found := s.outputFormats[page.KindHome].GetByName(output.RSSFormat.Name)
 
 	if found {
 		s.Info.RSSLink = s.permalink(rssOutputFormat.BaseFilename())
@@ -1252,10 +1307,6 @@ func (s *Site) isLayoutDirEvent(e fsnotify.Event) bool {
 	return s.BaseFs.SourceFilesystems.IsLayout(e.Name)
 }
 
-func (s *Site) absContentDir() string {
-	return s.PathSpec.AbsPathify(s.PathSpec.ContentDir)
-}
-
 func (s *Site) isContentDirEvent(e fsnotify.Event) bool {
 	return s.BaseFs.IsContent(e.Name)
 }
@@ -1286,13 +1337,13 @@ func (c *contentCaptureResultHandler) handleBundles(d *bundleDirs) {
 	}
 }
 
-func (c *contentCaptureResultHandler) handleCopyFiles(files ...pathLangFile) {
-	for _, proc := range c.contentProcessors {
-		proc.processAssets(files)
-	}
+func (c *contentCaptureResultHandler) handleCopyFile(f pathLangFile) {
+	proc := c.getContentProcessor(f.Lang())
+	proc.processAsset(f)
 }
 
 func (s *Site) readAndProcessContent(filenames ...string) error {
+
 	ctx := context.Background()
 	g, ctx := errgroup.WithContext(ctx)
 
@@ -1300,9 +1351,9 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
 
 	contentProcessors := make(map[string]*siteContentProcessor)
 	var defaultContentProcessor *siteContentProcessor
-	sites := s.owner.langSite()
+	sites := s.h.langSite()
 	for k, v := range sites {
-		if v.Language.Disabled {
+		if v.language.Disabled {
 			continue
 		}
 		proc := newSiteContentProcessor(ctx, len(filenames) > 0, v)
@@ -1326,7 +1377,7 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
 
 	if s.running() {
 		// Need to track changes.
-		bundleMap = s.owner.ContentChanges
+		bundleMap = s.h.ContentChanges
 		handler = &captureResultHandlerChain{handlers: []captureBundlesHandler{mainHandler, bundleMap}}
 
 	} else {
@@ -1349,28 +1400,11 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
 	return err2
 }
 
-func (s *Site) buildSiteMeta() (err error) {
-	defer s.timerStep("build Site meta")
+func (s *Site) getMenusFromConfig() navigation.Menus {
 
-	if len(s.Pages) == 0 {
-		return
-	}
-
-	s.assembleTaxonomies()
-
-	for _, p := range s.AllPages {
-		// this depends on taxonomies
-		p.setValuesForKind(s)
-	}
-
-	return
-}
-
-func (s *Site) getMenusFromConfig() Menus {
+	ret := navigation.Menus{}
 
-	ret := Menus{}
-
-	if menus := s.Language.GetStringMap("menus"); menus != nil {
+	if menus := s.language.GetStringMap("menus"); menus != nil {
 		for name, menu := range menus {
 			m, err := cast.ToSliceE(menu)
 			if err != nil {
@@ -1380,20 +1414,20 @@ func (s *Site) getMenusFromConfig() Menus {
 				for _, entry := range m {
 					s.Log.DEBUG.Printf("found menu: %q, in site config\n", name)
 
-					menuEntry := MenuEntry{Menu: name}
+					menuEntry := navigation.MenuEntry{Menu: name}
 					ime, err := cast.ToStringMapE(entry)
 					if err != nil {
 						s.Log.ERROR.Printf("unable to process menus in site config\n")
 						s.Log.ERROR.Println(err)
 					}
 
-					menuEntry.marshallMap(ime)
+					menuEntry.MarshallMap(ime)
 					menuEntry.URL = s.Info.createNodeMenuEntryURL(menuEntry.URL)
 
 					if ret[name] == nil {
-						ret[name] = &Menu{}
+						ret[name] = navigation.Menu{}
 					}
-					*ret[name] = ret[name].add(&menuEntry)
+					ret[name] = ret[name].Add(&menuEntry)
 				}
 			}
 		}
@@ -1417,28 +1451,27 @@ func (s *SiteInfo) createNodeMenuEntryURL(in string) string {
 }
 
 func (s *Site) assembleMenus() {
-	s.Menus = Menus{}
+	s.menus = make(navigation.Menus)
 
 	type twoD struct {
 		MenuName, EntryName string
 	}
-	flat := map[twoD]*MenuEntry{}
-	children := map[twoD]Menu{}
+	flat := map[twoD]*navigation.MenuEntry{}
+	children := map[twoD]navigation.Menu{}
 
 	// add menu entries from config to flat hash
 	menuConfig := s.getMenusFromConfig()
 	for name, menu := range menuConfig {
-		for _, me := range *menu {
+		for _, me := range menu {
 			flat[twoD{name, me.KeyName()}] = me
 		}
 	}
 
 	sectionPagesMenu := s.Info.sectionPagesMenu
-	pages := s.Pages
 
 	if sectionPagesMenu != "" {
-		for _, p := range pages {
-			if p.Kind == KindSection {
+		for _, p := range s.workAllPages {
+			if p.Kind() == page.KindSection {
 				// From Hugo 0.22 we have nested sections, but until we get a
 				// feel of how that would work in this setting, let us keep
 				// this menu for the top level only.
@@ -1447,9 +1480,9 @@ func (s *Site) assembleMenus() {
 					continue
 				}
 
-				me := MenuEntry{Identifier: id,
+				me := navigation.MenuEntry{Identifier: id,
 					Name:   p.LinkTitle(),
-					Weight: p.Weight,
+					Weight: p.Weight(),
 					URL:    p.RelPermalink()}
 				flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
 			}
@@ -1457,10 +1490,10 @@ func (s *Site) assembleMenus() {
 	}
 
 	// Add menu entries provided by pages
-	for _, p := range pages {
-		for name, me := range p.Menus() {
+	for _, p := range s.workAllPages {
+		for name, me := range p.pageMenus.menus() {
 			if _, ok := flat[twoD{name, me.KeyName()}]; ok {
-				s.SendError(p.errWithFileContext(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name)))
+				s.SendError(p.wrapError(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name)))
 				continue
 			}
 			flat[twoD{name, me.KeyName()}] = me
@@ -1470,7 +1503,7 @@ func (s *Site) assembleMenus() {
 	// Create Children Menus First
 	for _, e := range flat {
 		if e.Parent != "" {
-			children[twoD{e.Menu, e.Parent}] = children[twoD{e.Menu, e.Parent}].add(e)
+			children[twoD{e.Menu, e.Parent}] = children[twoD{e.Menu, e.Parent}].Add(e)
 		}
 	}
 
@@ -1479,7 +1512,7 @@ func (s *Site) assembleMenus() {
 		_, ok := flat[twoD{p.MenuName, p.EntryName}]
 		if !ok {
 			// if parent does not exist, create one without a URL
-			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
+			flat[twoD{p.MenuName, p.EntryName}] = &navigation.MenuEntry{Name: p.EntryName, URL: ""}
 		}
 		flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
 	}
@@ -1487,131 +1520,136 @@ func (s *Site) assembleMenus() {
 	// Assembling Top Level of Tree
 	for menu, e := range flat {
 		if e.Parent == "" {
-			_, ok := s.Menus[menu.MenuName]
+			_, ok := s.menus[menu.MenuName]
 			if !ok {
-				s.Menus[menu.MenuName] = &Menu{}
+				s.menus[menu.MenuName] = navigation.Menu{}
 			}
-			*s.Menus[menu.MenuName] = s.Menus[menu.MenuName].add(e)
+			s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e)
 		}
 	}
 }
 
+// get any language code to prefix the target file path with.
+func (s *Site) getLanguageTargetPathLang(alwaysInSubDir bool) string {
+	if s.h.IsMultihost() {
+		return s.Language().Lang
+	}
+
+	return s.getLanguagePermalinkLang(alwaysInSubDir)
+}
+
+// get any language code to prefix the relative permalink with.
+func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string {
+
+	if !s.Info.IsMultiLingual() || s.h.IsMultihost() {
+		return ""
+	}
+
+	if alwaysInSubDir {
+		return s.Language().Lang
+	}
+
+	isDefault := s.Language().Lang == s.multilingual().DefaultLang.Lang
+
+	if !isDefault || s.Info.defaultContentLanguageInSubdir {
+		return s.Language().Lang
+	}
+
+	return ""
+}
+
 func (s *Site) getTaxonomyKey(key string) string {
-	if s.Info.preserveTaxonomyNames {
-		// Keep as is
-		return key
+	if s.PathSpec.DisablePathToLower {
+		return s.PathSpec.MakePath(key)
 	}
-	return s.PathSpec.MakePathSanitized(key)
+	return strings.ToLower(s.PathSpec.MakePath(key))
 }
 
-// We need to create the top level taxonomy early in the build process
-// to be able to determine the page Kind correctly.
-func (s *Site) createTaxonomiesEntries() {
+func (s *Site) assembleTaxonomies() error {
 	s.Taxonomies = make(TaxonomyList)
-	taxonomies := s.Language.GetStringMapString("taxonomies")
+	taxonomies := s.siteCfg.taxonomiesConfig
 	for _, plural := range taxonomies {
 		s.Taxonomies[plural] = make(Taxonomy)
 	}
-}
-
-func (s *Site) assembleTaxonomies() {
-	s.taxonomiesPluralSingular = make(map[string]string)
-	s.taxonomiesOrigKey = make(map[string]string)
 
-	taxonomies := s.Language.GetStringMapString("taxonomies")
+	s.taxonomyNodes = make(taxonomyNodeInfos)
 
 	s.Log.INFO.Printf("found taxonomies: %#v\n", taxonomies)
 
 	for singular, plural := range taxonomies {
-		s.taxonomiesPluralSingular[plural] = singular
+		parent := s.taxonomyNodes.GetOrCreate(plural, "", "")
+		parent.singular = singular
+
+		addTaxonomy := func(plural, term string, weight int, p page.Page) {
+			key := s.getTaxonomyKey(term)
 
-		for _, p := range s.Pages {
-			vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
+			n := s.taxonomyNodes.GetOrCreate(plural, key, term)
+			n.parent = parent
+
+			// There may be different spellings before normalization, so the
+			// last one will win, e.g. "hugo" vs "Hugo".
+			n.term = term
+
+			w := page.NewWeightedPage(weight, p, n.getOwner)
+
+			s.Taxonomies[plural].add(key, w)
+
+			n.UpdateFromPage(w.Page)
+			parent.UpdateFromPage(w.Page)
+		}
 
-			w := p.getParamToLower(plural + "_weight")
+		for _, p := range s.workAllPages {
+			vals := getParam(p, plural, false)
+
+			w := getParamToLower(p, plural+"_weight")
 			weight, err := cast.ToIntE(w)
 			if err != nil {
-				s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %s", w, p.File.Path())
+				s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.pathOrTitle())
 				// weight will equal zero, so let the flow continue
 			}
 
 			if vals != nil {
 				if v, ok := vals.([]string); ok {
 					for _, idx := range v {
-						x := WeightedPage{weight, p}
-						s.Taxonomies[plural].add(s.getTaxonomyKey(idx), x)
-						if s.Info.preserveTaxonomyNames {
-							// Need to track the original
-							s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, s.PathSpec.MakePathSanitized(idx))] = idx
-						}
+						addTaxonomy(plural, idx, weight, p)
 					}
 				} else if v, ok := vals.(string); ok {
-					x := WeightedPage{weight, p}
-					s.Taxonomies[plural].add(s.getTaxonomyKey(v), x)
-					if s.Info.preserveTaxonomyNames {
-						// Need to track the original
-						s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, s.PathSpec.MakePathSanitized(v))] = v
-					}
+					addTaxonomy(plural, v, weight, p)
 				} else {
-					s.Log.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path())
+					s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.pathOrTitle())
 				}
 			}
 		}
+
 		for k := range s.Taxonomies[plural] {
 			s.Taxonomies[plural][k].Sort()
 		}
 	}
 
-	s.Info.Taxonomies = s.Taxonomies
+	return nil
 }
 
 // Prepare site for a new full build.
 func (s *Site) resetBuildState() {
-
-	s.relatedDocsHandler = newSearchIndexHandler(s.relatedDocsHandler.cfg)
+	s.relatedDocsHandler = s.relatedDocsHandler.Clone()
 	s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
-	// TODO(bep) get rid of this double
-	s.Info.PageCollections = s.PageCollections
-
-	s.draftCount = 0
-	s.futureCount = 0
-
-	s.expiredCount = 0
+	s.buildStats = &buildStats{}
+	s.init.Reset()
 
 	for _, p := range s.rawAllPages {
-		p.subSections = Pages{}
+		p.subSections = page.Pages{}
 		p.parent = nil
-		p.scratch = maps.NewScratch()
-		p.mainPageOutput = nil
+		p.Scratcher = maps.NewScratcher()
 	}
 }
 
-func (s *Site) layouts(p *PageOutput) ([]string, error) {
-	return s.layoutHandler.For(p.layoutDescriptor, p.outputFormat)
-}
-
-func (s *Site) preparePages() error {
-	var errors []error
-
-	for _, p := range s.Pages {
-		if err := p.prepareLayouts(); err != nil {
-			errors = append(errors, err)
-		}
-		if err := p.prepareData(s); err != nil {
-			errors = append(errors, err)
-		}
-	}
-
-	return s.owner.pickOneAndLogTheRest(errors)
-}
-
 func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
 	var errors []error
 	for e := range results {
 		errors = append(errors, e)
 	}
 
-	errs <- s.owner.pickOneAndLogTheRest(errors)
+	errs <- s.h.pickOneAndLogTheRest(errors)
 
 	close(errs)
 }
@@ -1623,25 +1661,17 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
 // When we now remove the Kind from this API, we need to make the transition as painless
 // as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
 // i.e. 2 arguments, so we test for that.
-func (s *SiteInfo) GetPage(ref ...string) (*Page, error) {
-	return s.getPageOldVersion(ref...)
-}
-
-func (s *Site) permalinkForOutputFormat(link string, f output.Format) (string, error) {
-	var (
-		baseURL string
-		err     error
-	)
+func (s *SiteInfo) GetPage(ref ...string) (page.Page, error) {
+	p, err := s.s.getPageOldVersion(ref...)
 
-	if f.Protocol != "" {
-		baseURL, err = s.PathSpec.BaseURL.WithProtocol(f.Protocol)
-		if err != nil {
-			return "", err
-		}
-	} else {
-		baseURL = s.PathSpec.BaseURL.String()
+	if p == nil {
+		// The nil struct has meaning in some situations, mostly to avoid breaking
+		// existing sites doing $nilpage.IsDescendant($p), which will always return
+		// false.
+		p = page.NilPage
 	}
-	return s.PathSpec.PermalinkForBaseURL(link, baseURL), nil
+
+	return p, err
 }
 
 func (s *Site) permalink(link string) string {
@@ -1653,7 +1683,6 @@ func (s *Site) renderAndWriteXML(statCounter *uint64, name string, targetPath st
 	s.Log.DEBUG.Printf("Render XML for %q to %q", name, targetPath)
 	renderBuffer := bp.GetBuffer()
 	defer bp.PutBuffer(renderBuffer)
-	renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")
 
 	if err := s.renderForLayouts(name, d, renderBuffer, layouts...); err != nil {
 		return err
@@ -1684,11 +1713,11 @@ func (s *Site) renderAndWriteXML(statCounter *uint64, name string, targetPath st
 
 }
 
-func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *PageOutput, layouts ...string) error {
+func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, layouts ...string) error {
 	renderBuffer := bp.GetBuffer()
 	defer bp.PutBuffer(renderBuffer)
 
-	if err := s.renderForLayouts(p.Kind, p, renderBuffer, layouts...); err != nil {
+	if err := s.renderForLayouts(p.Kind(), p, renderBuffer, layouts...); err != nil {
 
 		return err
 	}
@@ -1697,7 +1726,7 @@ func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath s
 		return nil
 	}
 
-	isHTML := p.outputFormat.IsHTML
+	isHTML := p.outputFormat().IsHTML
 
 	var path string
 
@@ -1715,7 +1744,7 @@ func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath s
 		Src:          renderBuffer,
 		TargetPath:   targetPath,
 		StatCounter:  statCounter,
-		OutputFormat: p.outputFormat,
+		OutputFormat: p.outputFormat(),
 	}
 
 	if isHTML {
@@ -1743,19 +1772,20 @@ var infoOnMissingLayout = map[string]bool{
 }
 
 func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts ...string) (err error) {
-	var templ tpl.Template
-
-	templ = s.findFirstTemplate(layouts...)
+	templ := s.findFirstTemplate(layouts...)
 	if templ == nil {
 		log := s.Log.WARN
 		if infoOnMissingLayout[name] {
 			log = s.Log.INFO
 		}
 
-		if p, ok := d.(*PageOutput); ok {
-			log.Printf("Found no layout for %q, language %q, output format %q: create a template below /layouts with one of these filenames: %s\n", name, s.Language.Lang, p.outputFormat.Name, layoutsLogFormat(layouts))
+		errMsg := "You should create a template file which matches Hugo Layouts Lookup Rules for this combination."
+
+		if p, ok := d.(pageContext); ok {
+			err := p.wrapError(errors.Errorf("found no layout file for %q: %s", name, errMsg))
+			log.Println(err)
 		} else {
-			log.Printf("Found no layout for %q, language %q: create a template below /layouts with one of these filenames: %s\n", name, s.Language.Lang, layoutsLogFormat(layouts))
+			log.Printf("Found no layout file for %q: %s\n", name, errMsg)
 		}
 		return nil
 	}
@@ -1766,20 +1796,6 @@ func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts
 	return
 }
 
-func layoutsLogFormat(layouts []string) string {
-	var filtered []string
-	for _, l := range layouts {
-		// This is  a technical prefix of no interest to the user.
-		lt := strings.TrimPrefix(l, "_text/")
-		// We have this in the lookup path for historical reasons.
-		lt = strings.TrimPrefix(lt, "page/")
-		filtered = append(filtered, lt)
-	}
-
-	filtered = helpers.UniqueStrings(filtered)
-	return strings.Join(filtered, ", ")
-}
-
 func (s *Site) findFirstTemplate(layouts ...string) tpl.Template {
 	for _, layout := range layouts {
 		if templ, found := s.Tmpl.Lookup(layout); found {
@@ -1795,69 +1811,93 @@ func (s *Site) publish(statCounter *uint64, path string, r io.Reader) (err error
 	return helpers.WriteToDisk(filepath.Clean(path), r, s.BaseFs.PublishFs)
 }
 
-func getGoMaxProcs() int {
-	if gmp := os.Getenv("GOMAXPROCS"); gmp != "" {
-		if p, err := strconv.Atoi(gmp); err != nil {
-			return p
+func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string {
+	if fi.TranslationBaseName() == "_index" {
+		if fi.Dir() == "" {
+			return page.KindHome
 		}
+
+		return s.kindFromSections(sections)
+
 	}
-	return 1
+	return page.KindPage
 }
 
-func (s *Site) newNodePage(typ string, sections ...string) *Page {
-	p := &Page{
-		language:        s.Language,
-		pageInit:        &pageInit{},
-		pageContentInit: &pageContentInit{},
-		Kind:            typ,
-		File:            &source.FileInfo{},
-		data:            make(map[string]interface{}),
-		Site:            &s.Info,
-		sections:        sections,
-		s:               s}
+func (s *Site) kindFromSections(sections []string) string {
+	if len(sections) == 0 || len(s.siteCfg.taxonomiesConfig) == 0 {
+		return page.KindSection
+	}
 
-	p.outputFormats = p.s.outputFormats[p.Kind]
+	sectionPath := path.Join(sections...)
 
-	return p
+	for _, plural := range s.siteCfg.taxonomiesConfig {
+		if plural == sectionPath {
+			return page.KindTaxonomyTerm
+		}
 
-}
+		if strings.HasPrefix(sectionPath, plural) {
+			return page.KindTaxonomy
+		}
 
-func (s *Site) newHomePage() *Page {
-	p := s.newNodePage(KindHome)
-	p.title = s.Info.Title
-	pages := Pages{}
-	p.data["Pages"] = pages
-	p.Pages = pages
-	return p
-}
+	}
 
-func (s *Site) newTaxonomyPage(plural, key string) *Page {
+	return page.KindSection
+}
 
-	p := s.newNodePage(KindTaxonomy, plural, key)
+func (s *Site) newTaxonomyPage(title string, sections ...string) *pageState {
+	p, err := newPageFromMeta(&pageMeta{
+		title:    title,
+		s:        s,
+		kind:     page.KindTaxonomy,
+		sections: sections,
+	})
 
-	if s.Info.preserveTaxonomyNames {
-		p.title = key
-	} else {
-		p.title = strings.Replace(s.titleFunc(key), "-", " ", -1)
+	if err != nil {
+		panic(err)
 	}
 
 	return p
+
 }
 
-func (s *Site) newSectionPage(name string) *Page {
-	p := s.newNodePage(KindSection, name)
+func (s *Site) newPage(kind string, sections ...string) *pageState {
+	p, err := newPageFromMeta(&pageMeta{
+		s:        s,
+		kind:     kind,
+		sections: sections,
+	})
 
-	sectionName := helpers.FirstUpper(name)
-	if s.Cfg.GetBool("pluralizeListTitles") {
-		p.title = inflect.Pluralize(sectionName)
-	} else {
-		p.title = sectionName
+	if err != nil {
+		panic(err)
 	}
+
 	return p
 }
 
-func (s *Site) newTaxonomyTermsPage(plural string) *Page {
-	p := s.newNodePage(KindTaxonomyTerm, plural)
-	p.title = s.titleFunc(plural)
-	return p
+func getGoMaxProcs() int {
+	if gmp := os.Getenv("GOMAXPROCS"); gmp != "" {
+		if p, err := strconv.Atoi(gmp); err != nil {
+			return p
+		}
+	}
+	return 1
+}
+
+func (s *Site) shouldBuild(p page.Page) bool {
+	return shouldBuild(s.BuildFuture, s.BuildExpired,
+		s.BuildDrafts, p.Draft(), p.PublishDate(), p.ExpiryDate())
+}
+
+func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
+	publishDate time.Time, expiryDate time.Time) bool {
+	if !(buildDrafts || !Draft) {
+		return false
+	}
+	if !buildFuture && !publishDate.IsZero() && publishDate.After(time.Now()) {
+		return false
+	}
+	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(time.Now()) {
+		return false
+	}
+	return true
 }
diff --git a/hugolib/siteJSONEncode_test.go b/hugolib/siteJSONEncode_test.go
index 5bb6e52e822..9187751fb7a 100644
--- a/hugolib/siteJSONEncode_test.go
+++ b/hugolib/siteJSONEncode_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,12 +14,7 @@
 package hugolib
 
 import (
-	"encoding/json"
 	"testing"
-
-	"path/filepath"
-
-	"github.com/gohugoio/hugo/deps"
 )
 
 // Issue #1123
@@ -27,27 +22,22 @@ import (
 // May be smart to run with: -timeout 4000ms
 func TestEncodePage(t *testing.T) {
 	t.Parallel()
-	cfg, fs := newTestCfg()
 
-	writeSource(t, fs, filepath.Join("content", "page.md"), `---
-title: Simple
+	templ := `{{ index .Site.RegularPages 0 | jsonify }}`
+
+	b := newTestSitesBuilder(t)
+	b.WithSimpleConfigFile().WithTemplatesAdded("index.html", templ)
+	b.WithContent("page.md", `---
+title: "Page"
+date: 2019-02-28
 ---
-Summary text
 
-<!--more-->
-`)
+Content.
 
-	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+`)
 
-	_, err := json.Marshal(s)
-	check(t, err)
+	b.Build(BuildCfg{})
 
-	_, err = json.Marshal(s.RegularPages[0])
-	check(t, err)
-}
+	b.AssertFileContent("public/index.html", `"Date":"2019-02-28T00:00:00Z"`)
 
-func check(t *testing.T, err error) {
-	if err != nil {
-		t.Fatalf("Failed %s", err)
-	}
 }
diff --git a/hugolib/site_output.go b/hugolib/site_output.go
index 0a751396147..9fb2365066e 100644
--- a/hugolib/site_output.go
+++ b/hugolib/site_output.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,6 +18,7 @@ import (
 
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/spf13/cast"
 )
 
@@ -28,11 +29,11 @@ func createDefaultOutputFormats(allFormats output.Formats, cfg config.Provider)
 	sitemapOut, _ := allFormats.GetByName(output.SitemapFormat.Name)
 
 	return map[string]output.Formats{
-		KindPage:         {htmlOut},
-		KindHome:         {htmlOut, rssOut},
-		KindSection:      {htmlOut, rssOut},
-		KindTaxonomy:     {htmlOut, rssOut},
-		KindTaxonomyTerm: {htmlOut, rssOut},
+		page.KindPage:         {htmlOut},
+		page.KindHome:         {htmlOut, rssOut},
+		page.KindSection:      {htmlOut, rssOut},
+		page.KindTaxonomy:     {htmlOut, rssOut},
+		page.KindTaxonomyTerm: {htmlOut, rssOut},
 		// Below are for conistency. They are currently not used during rendering.
 		kindRSS:       {rssOut},
 		kindSitemap:   {sitemapOut},
@@ -65,7 +66,7 @@ func createSiteOutputFormats(allFormats output.Formats, cfg config.Provider) (ma
 		for _, format := range vals {
 			f, found := allFormats.GetByName(format)
 			if !found {
-				return nil, fmt.Errorf("Failed to resolve output format %q from site config", format)
+				return nil, fmt.Errorf("failed to resolve output format %q from site config", format)
 			}
 			formats = append(formats, f)
 		}
diff --git a/hugolib/site_output_test.go b/hugolib/site_output_test.go
index e9a7e113e97..e4947e5cdf5 100644
--- a/hugolib/site_output_test.go
+++ b/hugolib/site_output_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,6 +17,8 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"github.com/spf13/afero"
 
 	"github.com/stretchr/testify/require"
@@ -148,15 +150,15 @@ Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.P
 	require.NoError(t, err)
 
 	s := h.Sites[0]
-	require.Equal(t, "en", s.Language.Lang)
+	require.Equal(t, "en", s.language.Lang)
 
-	home := s.getPage(KindHome)
+	home := s.getPage(page.KindHome)
 
 	require.NotNil(t, home)
 
 	lenOut := len(outputs)
 
-	require.Len(t, home.outputFormats, lenOut)
+	require.Len(t, home.OutputFormats(), lenOut)
 
 	// There is currently always a JSON output to make it simpler ...
 	altFormats := lenOut - 1
@@ -207,12 +209,8 @@ Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.P
 	}
 
 	of := home.OutputFormats()
-	require.Len(t, of, lenOut)
-	require.Nil(t, of.Get("Hugo"))
-	require.NotNil(t, of.Get("json"))
+
 	json := of.Get("JSON")
-	_, err = home.AlternativeOutputFormats()
-	require.Error(t, err)
 	require.NotNil(t, json)
 	require.Equal(t, "/blog/index.json", json.RelPermalink())
 	require.Equal(t, "http://example.com/blog/index.json", json.Permalink())
@@ -323,7 +321,7 @@ baseName = "customdelimbase"
 	th.assertFileContent("public/customdelimbase_del", "custom delim")
 
 	s := h.Sites[0]
-	home := s.getPage(KindHome)
+	home := s.getPage(page.KindHome)
 	require.NotNil(t, home)
 
 	outputs := home.OutputFormats()
@@ -339,8 +337,8 @@ func TestCreateSiteOutputFormats(t *testing.T) {
 	assert := require.New(t)
 
 	outputsConfig := map[string]interface{}{
-		KindHome:    []string{"HTML", "JSON"},
-		KindSection: []string{"JSON"},
+		page.KindHome:    []string{"HTML", "JSON"},
+		page.KindSection: []string{"JSON"},
 	}
 
 	cfg := viper.New()
@@ -348,13 +346,13 @@ func TestCreateSiteOutputFormats(t *testing.T) {
 
 	outputs, err := createSiteOutputFormats(output.DefaultFormats, cfg)
 	assert.NoError(err)
-	assert.Equal(output.Formats{output.JSONFormat}, outputs[KindSection])
-	assert.Equal(output.Formats{output.HTMLFormat, output.JSONFormat}, outputs[KindHome])
+	assert.Equal(output.Formats{output.JSONFormat}, outputs[page.KindSection])
+	assert.Equal(output.Formats{output.HTMLFormat, output.JSONFormat}, outputs[page.KindHome])
 
 	// Defaults
-	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[KindTaxonomy])
-	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[KindTaxonomyTerm])
-	assert.Equal(output.Formats{output.HTMLFormat}, outputs[KindPage])
+	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[page.KindTaxonomy])
+	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[page.KindTaxonomyTerm])
+	assert.Equal(output.Formats{output.HTMLFormat}, outputs[page.KindPage])
 
 	// These aren't (currently) in use when rendering in Hugo,
 	// but the pages needs to be assigned an output format,
@@ -370,7 +368,7 @@ func TestCreateSiteOutputFormatsInvalidConfig(t *testing.T) {
 	assert := require.New(t)
 
 	outputsConfig := map[string]interface{}{
-		KindHome: []string{"FOO", "JSON"},
+		page.KindHome: []string{"FOO", "JSON"},
 	}
 
 	cfg := viper.New()
@@ -384,7 +382,7 @@ func TestCreateSiteOutputFormatsEmptyConfig(t *testing.T) {
 	assert := require.New(t)
 
 	outputsConfig := map[string]interface{}{
-		KindHome: []string{},
+		page.KindHome: []string{},
 	}
 
 	cfg := viper.New()
@@ -392,14 +390,14 @@ func TestCreateSiteOutputFormatsEmptyConfig(t *testing.T) {
 
 	outputs, err := createSiteOutputFormats(output.DefaultFormats, cfg)
 	assert.NoError(err)
-	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[KindHome])
+	assert.Equal(output.Formats{output.HTMLFormat, output.RSSFormat}, outputs[page.KindHome])
 }
 
 func TestCreateSiteOutputFormatsCustomFormats(t *testing.T) {
 	assert := require.New(t)
 
 	outputsConfig := map[string]interface{}{
-		KindHome: []string{},
+		page.KindHome: []string{},
 	}
 
 	cfg := viper.New()
@@ -412,5 +410,5 @@ func TestCreateSiteOutputFormatsCustomFormats(t *testing.T) {
 
 	outputs, err := createSiteOutputFormats(output.Formats{customRSS, customHTML}, cfg)
 	assert.NoError(err)
-	assert.Equal(output.Formats{customHTML, customRSS}, outputs[KindHome])
+	assert.Equal(output.Formats{customHTML, customRSS}, outputs[page.KindHome])
 }
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index 4ce2b4c53d1..8b4d6261751 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,17 +19,44 @@ import (
 	"strings"
 	"sync"
 
+	"github.com/gohugoio/hugo/output"
 	"github.com/pkg/errors"
 
-	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/page/pagemeta"
 )
 
+type siteRenderContext struct {
+	cfg *BuildCfg
+
+	// Zero based index for all output formats combined.
+	sitesOutIdx int
+
+	// Zero based index of the output formats configured within a Site.
+	outIdx int
+
+	multihost bool
+}
+
+// Whether to render 404.html, robotsTXT.txt which usually is rendered
+// once only in the site root.
+func (s siteRenderContext) renderSingletonPages() bool {
+	if s.multihost {
+		// 1 per site
+		return s.outIdx == 0
+	}
+
+	// 1 for all sites
+	return s.sitesOutIdx == 0
+
+}
+
 // renderPages renders pages each corresponding to a markdown file.
 // TODO(bep np doc
-func (s *Site) renderPages(cfg *BuildCfg) error {
+func (s *Site) renderPages(ctx *siteRenderContext) error {
 
 	results := make(chan error)
-	pages := make(chan *Page)
+	pages := make(chan *pageState)
 	errs := make(chan error)
 
 	go s.errorCollator(results, errs)
@@ -40,17 +67,25 @@ func (s *Site) renderPages(cfg *BuildCfg) error {
 
 	for i := 0; i < numWorkers; i++ {
 		wg.Add(1)
-		go pageRenderer(s, pages, results, wg)
+		go pageRenderer(ctx, s, pages, results, wg)
 	}
 
-	if !cfg.PartialReRender && len(s.headlessPages) > 0 {
+	cfg := ctx.cfg
+
+	if !cfg.PartialReRender && ctx.outIdx == 0 && len(s.headlessPages) > 0 {
 		wg.Add(1)
 		go headlessPagesPublisher(s, wg)
 	}
 
-	for _, page := range s.Pages {
+L:
+	for _, page := range s.workAllPages {
 		if cfg.shouldRender(page) {
-			pages <- page
+			select {
+			case <-s.h.Done():
+				break L
+			default:
+				pages <- page
+			}
 		}
 	}
 
@@ -69,207 +104,103 @@ func (s *Site) renderPages(cfg *BuildCfg) error {
 
 func headlessPagesPublisher(s *Site, wg *sync.WaitGroup) {
 	defer wg.Done()
-	for _, page := range s.headlessPages {
-		outFormat := page.outputFormats[0] // There is only one
-		if outFormat.Name != s.rc.Format.Name {
-			// Avoid double work.
-			continue
-		}
-		pageOutput, err := newPageOutput(page, false, false, outFormat)
-		if err == nil {
-			page.mainPageOutput = pageOutput
-			err = pageOutput.renderResources()
-		}
-
-		if err != nil {
-			s.Log.ERROR.Printf("Failed to render resources for headless page %q: %s", page, err)
+	for _, p := range s.headlessPages {
+		if err := p.renderResources(); err != nil {
+			s.SendError(p.errorf(err, "failed to render page resources"))
 		}
 	}
 }
 
-func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
-	defer wg.Done()
-
-	for page := range pages {
+func pageRenderer(
+	ctx *siteRenderContext,
+	s *Site,
+	pages <-chan *pageState,
+	results chan<- error,
+	wg *sync.WaitGroup) {
 
-		for i, outFormat := range page.outputFormats {
+	defer wg.Done()
 
-			if outFormat.Name != page.s.rc.Format.Name {
-				// Will be rendered  ... later.
-				continue
-			}
+	for p := range pages {
+		if !p.render {
+			continue
+		}
 
-			var (
-				pageOutput *PageOutput
-				err        error
-			)
+		f := p.outputFormat()
 
-			if i == 0 {
-				pageOutput = page.mainPageOutput
-			} else {
-				pageOutput, err = page.mainPageOutput.copyWithFormat(outFormat, true)
-			}
+		// TODO(bep) get rid of this odd construct. RSS is an output format.
+		if f.Name == "RSS" && !s.isEnabled(kindRSS) {
+			continue
+		}
 
-			if err != nil {
-				s.Log.ERROR.Printf("Failed to create output page for type %q for page %q: %s", outFormat.Name, page, err)
+		if ctx.outIdx == 0 {
+			if err := p.renderResources(); err != nil {
+				s.SendError(p.errorf(err, "failed to render page resources"))
 				continue
 			}
+		}
 
-			if pageOutput == nil {
-				panic("no pageOutput")
-			}
-
-			// We only need to re-publish the resources if the output format is different
-			// from all of the previous (e.g. the "amp" use case).
-			shouldRender := i == 0
-			if i > 0 {
-				for j := i; j >= 0; j-- {
-					if outFormat.Path != page.outputFormats[j].Path {
-						shouldRender = true
-					} else {
-						shouldRender = false
-					}
-				}
-			}
-
-			if shouldRender {
-				if err := pageOutput.renderResources(); err != nil {
-					s.SendError(page.errorf(err, "failed to render page resources"))
-					continue
-				}
-			}
-
-			var layouts []string
-
-			if page.selfLayout != "" {
-				layouts = []string{page.selfLayout}
-			} else {
-				layouts, err = s.layouts(pageOutput)
-				if err != nil {
-					s.Log.ERROR.Printf("Failed to resolve layout for output %q for page %q: %s", outFormat.Name, page, err)
-					continue
-				}
-			}
-
-			switch pageOutput.outputFormat.Name {
+		layouts, err := p.getLayouts()
+		if err != nil {
+			s.Log.ERROR.Printf("Failed to resolve layout for output %q for page %q: %s", f.Name, p, err)
+			continue
+		}
 
-			case "RSS":
-				if err := s.renderRSS(pageOutput); err != nil {
-					results <- err
-				}
-			default:
-				targetPath, err := pageOutput.targetPath()
-				if err != nil {
-					s.Log.ERROR.Printf("Failed to create target path for output %q for page %q: %s", outFormat.Name, page, err)
-					continue
-				}
+		targetPath := p.targetPaths().TargetFilename
 
-				s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts)
+		if targetPath == "" {
+			s.Log.ERROR.Printf("Failed to create target path for output %q for page %q: %s", f.Name, p, err)
+			continue
+		}
 
-				if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
-					results <- err
-				}
+		if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, layouts...); err != nil {
+			results <- err
+		}
 
-				// Only render paginators for the main output format
-				if i == 0 && pageOutput.IsNode() {
-					if err := s.renderPaginator(pageOutput); err != nil {
-						results <- err
-					}
-				}
+		if p.paginator != nil && p.paginator.current != nil {
+			if err := s.renderPaginator(p, layouts); err != nil {
+				results <- err
 			}
-
 		}
 	}
 }
 
 // renderPaginator must be run after the owning Page has been rendered.
-func (s *Site) renderPaginator(p *PageOutput) error {
-	if p.paginator != nil {
-		s.Log.DEBUG.Printf("Render paginator for page %q", p.Path())
-		paginatePath := s.Cfg.GetString("paginatePath")
-
-		// write alias for page 1
-		addend := fmt.Sprintf("/%s/%d", paginatePath, 1)
-		target, err := p.createTargetPath(p.outputFormat, false, addend)
-		if err != nil {
-			return err
-		}
-
-		// TODO(bep) do better
-		link := newOutputFormat(p.Page, p.outputFormat).Permalink()
-		if err := s.writeDestAlias(target, link, p.outputFormat, nil); err != nil {
-			return err
-		}
-
-		pagers := p.paginator.Pagers()
-
-		for i, pager := range pagers {
-			if i == 0 {
-				// already created
-				continue
-			}
+func (s *Site) renderPaginator(p *pageState, layouts []string) error {
 
-			pagerNode, err := p.copy()
-			if err != nil {
-				return err
-			}
+	paginatePath := s.Cfg.GetString("paginatePath")
 
-			pagerNode.origOnCopy = p.Page
+	d := p.targetPathDescriptor
+	f := p.s.rc.Format
+	d.Type = f
 
-			pagerNode.paginator = pager
-			if pager.TotalPages() > 0 {
-				first, _ := pager.page(0)
-				pagerNode.Date = first.Date
-				pagerNode.Lastmod = first.Lastmod
-			}
+	// Rewind
+	p.paginator.current = p.paginator.current.First()
 
-			pageNumber := i + 1
-			addend := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
-			targetPath, _ := p.targetPath(addend)
-			layouts, err := p.layouts()
-
-			if err != nil {
-				return err
-			}
-
-			if err := s.renderAndWritePage(
-				&s.PathSpec.ProcessingStats.PaginatorPages,
-				pagerNode.title,
-				targetPath, pagerNode, layouts...); err != nil {
-				return err
-			}
+	// Write alias for page 1
+	d.Addends = fmt.Sprintf("/%s/%d", paginatePath, 1)
+	targetPaths := page.CreateTargetPaths(d)
 
-		}
+	if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, nil); err != nil {
+		return err
 	}
-	return nil
-}
 
-func (s *Site) renderRSS(p *PageOutput) error {
+	// Render pages for the rest
+	for current := p.paginator.current.Next(); current != nil; current = current.Next() {
 
-	if !s.isEnabled(kindRSS) {
-		return nil
-	}
-
-	limit := s.Cfg.GetInt("rssLimit")
-	if limit >= 0 && len(p.Pages) > limit {
-		p.Pages = p.Pages[:limit]
-		p.data["Pages"] = p.Pages
-	}
+		p.paginator.current = current
+		d.Addends = fmt.Sprintf("/%s/%d", paginatePath, current.PageNumber())
+		targetPaths := page.CreateTargetPaths(d)
 
-	layouts, err := s.layoutHandler.For(
-		p.layoutDescriptor,
-		p.outputFormat)
-	if err != nil {
-		return err
-	}
+		if err := s.renderAndWritePage(
+			&s.PathSpec.ProcessingStats.PaginatorPages,
+			p.Title(),
+			targetPaths.TargetFilename, p, layouts...); err != nil {
+			return err
+		}
 
-	targetPath, err := p.targetPath()
-	if err != nil {
-		return err
 	}
 
-	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.title,
-		targetPath, p, layouts...)
+	return nil
 }
 
 func (s *Site) render404() error {
@@ -277,33 +208,29 @@ func (s *Site) render404() error {
 		return nil
 	}
 
-	p := s.newNodePage(kind404)
+	p, err := newPageStandalone(&pageMeta{
+		s:    s,
+		kind: kind404,
+		urlPaths: pagemeta.URLPath{
+			URL: path.Join(s.GetURLLanguageBasePath(), "404.html"),
+		},
+	},
+		output.HTMLFormat,
+	)
 
-	p.title = "404 Page not found"
-	p.data["Pages"] = s.Pages
-	p.Pages = s.Pages
-	p.URLPath.URL = "404.html"
-
-	if err := p.initTargetPathDescriptor(); err != nil {
+	if err != nil {
 		return err
 	}
 
 	nfLayouts := []string{"404.html"}
 
-	htmlOut := output.HTMLFormat
-	htmlOut.BaseName = "404"
-
-	pageOutput, err := newPageOutput(p, false, false, htmlOut)
-	if err != nil {
-		return err
-	}
+	targetPath := p.targetPaths().TargetFilename
 
-	targetPath, err := pageOutput.targetPath()
-	if err != nil {
-		s.Log.ERROR.Printf("Failed to create target path for page %q: %s", p, err)
+	if targetPath == "" {
+		return errors.New("failed to create targetPath for 404 page")
 	}
 
-	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, pageOutput, nfLayouts...)
+	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, p, nfLayouts...)
 }
 
 func (s *Site) renderSitemap() error {
@@ -311,50 +238,28 @@ func (s *Site) renderSitemap() error {
 		return nil
 	}
 
-	sitemapDefault := parseSitemap(s.Cfg.GetStringMap("sitemap"))
-
-	n := s.newNodePage(kindSitemap)
+	p, err := newPageStandalone(&pageMeta{
+		s:    s,
+		kind: kindSitemap,
+		urlPaths: pagemeta.URLPath{
+			URL: s.siteCfg.sitemap.Filename,
+		}},
+		output.HTMLFormat,
+	)
 
-	// Include all pages (regular, home page, taxonomies etc.)
-	pages := s.Pages
-
-	page := s.newNodePage(kindSitemap)
-	page.URLPath.URL = ""
-	if err := page.initTargetPathDescriptor(); err != nil {
-		return err
-	}
-	page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
-	page.Sitemap.Priority = sitemapDefault.Priority
-	page.Sitemap.Filename = sitemapDefault.Filename
-
-	n.data["Pages"] = pages
-	n.Pages = pages
-
-	// TODO(bep) we have several of these
-	if err := page.initTargetPathDescriptor(); err != nil {
+	if err != nil {
 		return err
 	}
 
-	// TODO(bep) this should be done somewhere else
-	for _, page := range pages {
-		if page.Sitemap.ChangeFreq == "" {
-			page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
-		}
-
-		if page.Sitemap.Priority == -1 {
-			page.Sitemap.Priority = sitemapDefault.Priority
-		}
+	targetPath := p.targetPaths().TargetFilename
 
-		if page.Sitemap.Filename == "" {
-			page.Sitemap.Filename = sitemapDefault.Filename
-		}
+	if targetPath == "" {
+		return errors.New("failed to create targetPath for sitemap")
 	}
 
 	smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
-	addLanguagePrefix := n.Site.IsMultiLingual()
 
-	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap",
-		n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, smLayouts...)
+	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, smLayouts...)
 }
 
 func (s *Site) renderRobotsTXT() error {
@@ -366,53 +271,50 @@ func (s *Site) renderRobotsTXT() error {
 		return nil
 	}
 
-	p := s.newNodePage(kindRobotsTXT)
-	if err := p.initTargetPathDescriptor(); err != nil {
-		return err
-	}
-	p.data["Pages"] = s.Pages
-	p.Pages = s.Pages
-
-	rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
+	p, err := newPageStandalone(&pageMeta{
+		s:    s,
+		kind: kindRobotsTXT,
+		urlPaths: pagemeta.URLPath{
+			URL: path.Join(s.GetURLLanguageBasePath(), "robots.txt"),
+		},
+	},
+		output.RobotsTxtFormat)
 
-	pageOutput, err := newPageOutput(p, false, false, output.RobotsTxtFormat)
 	if err != nil {
 		return err
 	}
 
-	targetPath, err := pageOutput.targetPath()
-	if err != nil {
-		s.Log.ERROR.Printf("Failed to create target path for page %q: %s", p, err)
-	}
+	rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
 
-	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", targetPath, pageOutput, rLayouts...)
+	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, rLayouts...)
 
 }
 
 // renderAliases renders shell pages that simply have a redirect in the header.
 func (s *Site) renderAliases() error {
-	for _, p := range s.Pages {
-		if len(p.Aliases) == 0 {
+	for _, p := range s.workAllPages {
+
+		if len(p.Aliases()) == 0 {
 			continue
 		}
 
-		for _, f := range p.outputFormats {
-			if !f.IsHTML {
+		for _, of := range p.OutputFormats() {
+			if !of.Format.IsHTML {
 				continue
 			}
 
-			o := newOutputFormat(p, f)
-			plink := o.Permalink()
+			plink := of.Permalink()
+			f := of.Format
 
-			for _, a := range p.Aliases {
+			for _, a := range p.Aliases() {
 				if f.Path != "" {
 					// Make sure AMP and similar doesn't clash with regular aliases.
 					a = path.Join(a, f.Path)
 				}
 
-				lang := p.Lang()
+				lang := p.Language().Lang
 
-				if s.owner.multihost && !strings.HasPrefix(a, "/"+lang) {
+				if s.h.multihost && !strings.HasPrefix(a, "/"+lang) {
 					// These need to be in its language root.
 					a = path.Join(lang, a)
 				}
@@ -424,22 +326,32 @@ func (s *Site) renderAliases() error {
 		}
 	}
 
-	if s.owner.multilingual.enabled() && !s.owner.IsMultihost() {
-		html, found := s.outputFormatsConfig.GetByName("HTML")
-		if found {
-			mainLang := s.owner.multilingual.DefaultLang
-			if s.Info.defaultContentLanguageInSubdir {
-				mainLangURL := s.PathSpec.AbsURL(mainLang.Lang, false)
-				s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
-				if err := s.publishDestAlias(true, "/", mainLangURL, html, nil); err != nil {
-					return err
-				}
-			} else {
-				mainLangURL := s.PathSpec.AbsURL("", false)
-				s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
-				if err := s.publishDestAlias(true, mainLang.Lang, mainLangURL, html, nil); err != nil {
-					return err
-				}
+	return nil
+}
+
+// renderMainLanguageRedirect creates a redirect to the main language home,
+// depending on if it lives in sub folder (e.g. /en) or not.
+func (s *Site) renderMainLanguageRedirect() error {
+
+	if !s.h.multilingual.enabled() || s.h.IsMultihost() {
+		// No need for a redirect
+		return nil
+	}
+
+	html, found := s.outputFormatsConfig.GetByName("HTML")
+	if found {
+		mainLang := s.h.multilingual.DefaultLang
+		if s.Info.defaultContentLanguageInSubdir {
+			mainLangURL := s.PathSpec.AbsURL(mainLang.Lang, false)
+			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
+			if err := s.publishDestAlias(true, "/", mainLangURL, html, nil); err != nil {
+				return err
+			}
+		} else {
+			mainLangURL := s.PathSpec.AbsURL("", false)
+			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
+			if err := s.publishDestAlias(true, mainLang.Lang, mainLangURL, html, nil); err != nil {
+				return err
 			}
 		}
 	}
diff --git a/hugolib/site_sections.go b/hugolib/site_sections.go
index 38f6a3b6fce..d383e638906 100644
--- a/hugolib/site_sections.go
+++ b/hugolib/site_sections.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,18 +14,18 @@
 package hugolib
 
 import (
-	"fmt"
 	"path"
 	"strconv"
 	"strings"
 
-	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
 
 	radix "github.com/hashicorp/go-immutable-radix"
 )
 
 // Sections returns the top level sections.
-func (s *SiteInfo) Sections() Pages {
+func (s *SiteInfo) Sections() page.Pages {
 	home, err := s.Home()
 	if err == nil {
 		return home.Sections()
@@ -34,157 +34,23 @@ func (s *SiteInfo) Sections() Pages {
 }
 
 // Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
-func (s *SiteInfo) Home() (*Page, error) {
-	return s.GetPage(KindHome)
+func (s *SiteInfo) Home() (page.Page, error) {
+	return s.s.home, nil
 }
 
-// Parent returns a section's parent section or a page's section.
-// To get a section's subsections, see Page's Sections method.
-func (p *Page) Parent() *Page {
-	return p.parent
-}
-
-// CurrentSection returns the page's current section or the page itself if home or a section.
-// Note that this will return nil for pages that is not regular, home or section pages.
-func (p *Page) CurrentSection() *Page {
-	v := p
-	if v.origOnCopy != nil {
-		v = v.origOnCopy
-	}
-	if v.IsHome() || v.IsSection() {
-		return v
-	}
-
-	return v.parent
-}
-
-// FirstSection returns the section on level 1 below home, e.g. "/docs".
-// For the home page, this will return itself.
-func (p *Page) FirstSection() *Page {
-	v := p
-	if v.origOnCopy != nil {
-		v = v.origOnCopy
-	}
-
-	if v.parent == nil || v.parent.IsHome() {
-		return v
-	}
-
-	parent := v.parent
-	for {
-		current := parent
-		parent = parent.parent
-		if parent == nil || parent.IsHome() {
-			return current
-		}
-	}
-
-}
-
-// InSection returns whether the given page is in the current section.
-// Note that this will always return false for pages that are
-// not either regular, home or section pages.
-func (p *Page) InSection(other interface{}) (bool, error) {
-	if p == nil || other == nil {
-		return false, nil
-	}
-
-	pp, err := unwrapPage(other)
-	if err != nil {
-		return false, err
-	}
-
-	if pp == nil {
-		return false, nil
-	}
-
-	return pp.CurrentSection() == p.CurrentSection(), nil
-}
-
-// IsDescendant returns whether the current page is a descendant of the given page.
-// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
-func (p *Page) IsDescendant(other interface{}) (bool, error) {
-	if p == nil {
-		return false, nil
-	}
-	pp, err := unwrapPage(other)
-	if err != nil || pp == nil {
-		return false, err
-	}
-
-	if pp.Kind == KindPage && len(p.sections) == len(pp.sections) {
-		// A regular page is never its section's descendant.
-		return false, nil
-	}
-	return helpers.HasStringsPrefix(p.sections, pp.sections), nil
-}
-
-// IsAncestor returns whether the current page is an ancestor of the given page.
-// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
-func (p *Page) IsAncestor(other interface{}) (bool, error) {
-	if p == nil {
-		return false, nil
-	}
+func (s *Site) assembleSections() pageStatePages {
+	var newPages pageStatePages
 
-	pp, err := unwrapPage(other)
-	if err != nil || pp == nil {
-		return false, err
-	}
-
-	if p.Kind == KindPage && len(p.sections) == len(pp.sections) {
-		// A regular page is never its section's ancestor.
-		return false, nil
-	}
-
-	return helpers.HasStringsPrefix(pp.sections, p.sections), nil
-}
-
-// Eq returns whether the current page equals the given page.
-// Note that this is more accurate than doing `{{ if eq $page $otherPage }}`
-// since a Page can be embedded in another type.
-func (p *Page) Eq(other interface{}) bool {
-	pp, err := unwrapPage(other)
-	if err != nil {
-		return false
-	}
-
-	return p == pp
-}
-
-func unwrapPage(in interface{}) (*Page, error) {
-	switch v := in.(type) {
-	case *Page:
-		return v, nil
-	case *PageOutput:
-		return v.Page, nil
-	case *PageWithoutContent:
-		return v.Page, nil
-	case nil:
-		return nil, nil
-	default:
-		return nil, fmt.Errorf("%T not supported", in)
-	}
-}
-
-// Sections returns this section's subsections, if any.
-// Note that for non-sections, this method will always return an empty list.
-func (p *Page) Sections() Pages {
-	return p.subSections
-}
-
-func (s *Site) assembleSections() Pages {
-	var newPages Pages
-
-	if !s.isEnabled(KindSection) {
+	if !s.isEnabled(page.KindSection) {
 		return newPages
 	}
 
 	// Maps section kind pages to their path, i.e. "my/section"
-	sectionPages := make(map[string]*Page)
+	sectionPages := make(map[string]*pageState)
 
 	// The sections with content files will already have been created.
-	for _, sect := range s.findPagesByKind(KindSection) {
-		sectionPages[path.Join(sect.sections...)] = sect
+	for _, sect := range s.findWorkPagesByKind(page.KindSection) {
+		sectionPages[sect.SectionsPath()] = sect
 	}
 
 	const (
@@ -196,39 +62,44 @@ func (s *Site) assembleSections() Pages {
 	var (
 		inPages    = radix.New().Txn()
 		inSections = radix.New().Txn()
-		undecided  Pages
+		undecided  pageStatePages
 	)
 
-	home := s.findFirstPageByKindIn(KindHome, s.Pages)
+	home := s.findFirstWorkPageByKindIn(page.KindHome)
 
-	for i, p := range s.Pages {
-		if p.Kind != KindPage {
+	for i, p := range s.workAllPages {
+
+		if p.Kind() != page.KindPage {
 			continue
 		}
 
-		if len(p.sections) == 0 {
+		sections := p.SectionsEntries()
+
+		if len(sections) == 0 {
 			// Root level pages. These will have the home page as their Parent.
 			p.parent = home
 			continue
 		}
 
-		sectionKey := path.Join(p.sections...)
-		sect, found := sectionPages[sectionKey]
+		sectionKey := p.SectionsPath()
+		_, found := sectionPages[sectionKey]
+
+		if !found && len(sections) == 1 {
 
-		if !found && len(p.sections) == 1 {
 			// We only create content-file-less sections for the root sections.
-			sect = s.newSectionPage(p.sections[0])
-			sectionPages[sectionKey] = sect
-			newPages = append(newPages, sect)
+			n := s.newPage(page.KindSection, sections[0])
+
+			sectionPages[sectionKey] = n
+			newPages = append(newPages, n)
 			found = true
 		}
 
-		if len(p.sections) > 1 {
+		if len(sections) > 1 {
 			// Create the root section if not found.
-			_, rootFound := sectionPages[p.sections[0]]
+			_, rootFound := sectionPages[sections[0]]
 			if !rootFound {
-				sect = s.newSectionPage(p.sections[0])
-				sectionPages[p.sections[0]] = sect
+				sect := s.newPage(page.KindSection, sections[0])
+				sectionPages[sections[0]] = sect
 				newPages = append(newPages, sect)
 			}
 		}
@@ -246,13 +117,14 @@ func (s *Site) assembleSections() Pages {
 	// given a content file in /content/a/b/c/_index.md, we cannot create just
 	// the c section.
 	for _, sect := range sectionPages {
-		for i := len(sect.sections); i > 0; i-- {
-			sectionPath := sect.sections[:i]
+		sections := sect.SectionsEntries()
+		for i := len(sections); i > 0; i-- {
+			sectionPath := sections[:i]
 			sectionKey := path.Join(sectionPath...)
-			sect, found := sectionPages[sectionKey]
+			_, found := sectionPages[sectionKey]
 			if !found {
-				sect = s.newSectionPage(sectionPath[len(sectionPath)-1])
-				sect.sections = sectionPath
+				sect = s.newPage(page.KindSection, sectionPath[len(sectionPath)-1])
+				sect.m.sections = sectionPath
 				sectionPages[sectionKey] = sect
 				newPages = append(newPages, sect)
 			}
@@ -265,33 +137,36 @@ func (s *Site) assembleSections() Pages {
 	}
 
 	var (
-		currentSection *Page
-		children       Pages
+		currentSection *pageState
+		children       page.Pages
+		dates          *resource.Dates
 		rootSections   = inSections.Commit().Root()
 	)
 
 	for i, p := range undecided {
 		// Now we can decide where to put this page into the tree.
-		sectionKey := path.Join(p.sections...)
+		sectionKey := p.SectionsPath()
+
 		_, v, _ := rootSections.LongestPrefix([]byte(sectionKey))
-		sect := v.(*Page)
-		pagePath := path.Join(path.Join(sect.sections...), sectSectKey, "u", strconv.Itoa(i))
+		sect := v.(*pageState)
+		pagePath := path.Join(path.Join(sect.SectionsEntries()...), sectSectKey, "u", strconv.Itoa(i))
 		inPages.Insert([]byte(pagePath), p)
 	}
 
 	var rootPages = inPages.Commit().Root()
 
 	rootPages.Walk(func(path []byte, v interface{}) bool {
-		p := v.(*Page)
+		p := v.(*pageState)
 
-		if p.Kind == KindSection {
+		if p.Kind() == page.KindSection {
 			if currentSection != nil {
 				// A new section
-				currentSection.setPagePages(children)
+				currentSection.setPages(children)
 			}
 
 			currentSection = p
-			children = make(Pages, 0)
+			children = make(page.Pages, 0)
+			dates = &resource.Dates{}
 
 			return false
 
@@ -300,27 +175,31 @@ func (s *Site) assembleSections() Pages {
 		// Regular page
 		p.parent = currentSection
 		children = append(children, p)
+		dates.UpdateDateAndLastmodIfAfter(p)
 		return false
 	})
 
 	if currentSection != nil {
-		currentSection.setPagePages(children)
+		currentSection.setPages(children)
+		currentSection.m.Dates = *dates
+
 	}
 
 	// Build the sections hierarchy
 	for _, sect := range sectionPages {
-		if len(sect.sections) == 1 {
-			sect.parent = home
+		sections := sect.SectionsEntries()
+		if len(sections) == 1 {
+			if home != nil {
+				sect.parent = home
+			}
 		} else {
-			parentSearchKey := path.Join(sect.sections[:len(sect.sections)-1]...)
+			parentSearchKey := path.Join(sect.SectionsEntries()[:len(sections)-1]...)
 			_, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
-			p := v.(*Page)
+			p := v.(*pageState)
 			sect.parent = p
 		}
 
-		if sect.parent != nil {
-			sect.parent.subSections = append(sect.parent.subSections, sect)
-		}
+		sect.addSectionToParent()
 	}
 
 	var (
@@ -331,24 +210,13 @@ func (s *Site) assembleSections() Pages {
 		maxSectionWeight     int
 	)
 
-	mainSections, mainSectionsFound = s.Info.Params[sectionsParamIdLower]
+	mainSections, mainSectionsFound = s.Info.Params()[sectionsParamIdLower]
 
 	for _, sect := range sectionPages {
-		if sect.parent != nil {
-			sect.parent.subSections.sort()
-		}
-
-		for i, p := range sect.Pages {
-			if i > 0 {
-				p.NextInSection = sect.Pages[i-1]
-			}
-			if i < len(sect.Pages)-1 {
-				p.PrevInSection = sect.Pages[i+1]
-			}
-		}
+		sect.sortParentSections()
 
 		if !mainSectionsFound {
-			weight := len(sect.Pages) + (len(sect.Sections()) * 5)
+			weight := len(sect.Pages()) + (len(sect.Sections()) * 5)
 			if weight >= maxSectionWeight {
 				mainSections = []string{sect.Section()}
 				maxSectionWeight = weight
@@ -357,16 +225,9 @@ func (s *Site) assembleSections() Pages {
 	}
 
 	// Try to make this as backwards compatible as possible.
-	s.Info.Params[sectionsParamId] = mainSections
-	s.Info.Params[sectionsParamIdLower] = mainSections
+	s.Info.Params()[sectionsParamId] = mainSections
+	s.Info.Params()[sectionsParamIdLower] = mainSections
 
 	return newPages
 
 }
-
-func (p *Page) setPagePages(pages Pages) {
-	pages.sort()
-	p.Pages = pages
-	p.data = make(map[string]interface{})
-	p.data["Pages"] = pages
-}
diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go
index 1987d2bcb1e..3adfb2b5775 100644
--- a/hugolib/site_sections_test.go
+++ b/hugolib/site_sections_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,6 +20,7 @@ import (
 	"testing"
 
 	"github.com/gohugoio/hugo/deps"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/stretchr/testify/require"
 )
 
@@ -117,65 +118,66 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	require.Len(t, s.RegularPages, 21)
+	require.Len(t, s.RegularPages(), 21)
 
 	tests := []struct {
 		sections string
-		verify   func(p *Page)
+		verify   func(assert *require.Assertions, p page.Page)
 	}{
-		{"elsewhere", func(p *Page) {
-			assert.Len(p.Pages, 1)
-			for _, p := range p.Pages {
-				assert.Equal([]string{"elsewhere"}, p.sections)
+		{"elsewhere", func(assert *require.Assertions, p page.Page) {
+			assert.Len(p.Pages(), 1)
+			for _, p := range p.Pages() {
+				assert.Equal("elsewhere", p.SectionsPath())
 			}
 		}},
-		{"post", func(p *Page) {
-			assert.Len(p.Pages, 2)
-			for _, p := range p.Pages {
+		{"post", func(assert *require.Assertions, p page.Page) {
+			assert.Len(p.Pages(), 2)
+			for _, p := range p.Pages() {
 				assert.Equal("post", p.Section())
 			}
 		}},
-		{"empty1", func(p *Page) {
+		{"empty1", func(assert *require.Assertions, p page.Page) {
 			// > b,c
-			assert.NotNil(p.s.getPage(KindSection, "empty1", "b"))
-			assert.NotNil(p.s.getPage(KindSection, "empty1", "b", "c"))
+			assert.NotNil(getPage(p, "/empty1/b"))
+			assert.NotNil(getPage(p, "/empty1/b/c"))
 
 		}},
-		{"empty2", func(p *Page) {
+		{"empty2", func(assert *require.Assertions, p page.Page) {
 			// > b,c,d where b and d have content files.
-			b := p.s.getPage(KindSection, "empty2", "b")
+			b := getPage(p, "/empty2/b")
 			assert.NotNil(b)
-			assert.Equal("T40_-1", b.title)
-			c := p.s.getPage(KindSection, "empty2", "b", "c")
+			assert.Equal("T40_-1", b.Title())
+			c := getPage(p, "/empty2/b/c")
+
 			assert.NotNil(c)
-			assert.Equal("Cs", c.title)
-			d := p.s.getPage(KindSection, "empty2", "b", "c", "d")
+			assert.Equal("Cs", c.Title())
+			d := getPage(p, "/empty2/b/c/d")
+
 			assert.NotNil(d)
-			assert.Equal("T41_-1", d.title)
+			assert.Equal("T41_-1", d.Title())
 
 			assert.False(c.Eq(d))
 			assert.True(c.Eq(c))
 			assert.False(c.Eq("asdf"))
 
 		}},
-		{"empty3", func(p *Page) {
+		{"empty3", func(assert *require.Assertions, p page.Page) {
 			// b,c,d with regular page in b
-			b := p.s.getPage(KindSection, "empty3", "b")
+			b := getPage(p, "/empty3/b")
 			assert.NotNil(b)
-			assert.Len(b.Pages, 1)
-			assert.Equal("empty3.md", b.Pages[0].File.LogicalName())
+			assert.Len(b.Pages(), 1)
+			assert.Equal("empty3.md", b.Pages()[0].File().LogicalName())
 
 		}},
-		{"empty3", func(p *Page) {
-			xxx := p.s.getPage(KindPage, "empty3", "nil")
+		{"empty3", func(assert *require.Assertions, p page.Page) {
+			xxx := getPage(p, "/empty3/nil")
 			assert.Nil(xxx)
-			assert.Equal(xxx.Eq(nil), true)
 		}},
-		{"top", func(p *Page) {
-			assert.Equal("Tops", p.title)
-			assert.Len(p.Pages, 2)
-			assert.Equal("mypage2.md", p.Pages[0].LogicalName())
-			assert.Equal("mypage3.md", p.Pages[1].LogicalName())
+		{"top", func(assert *require.Assertions, p page.Page) {
+			assert.Equal("Tops", p.Title())
+			assert.Len(p.Pages(), 2)
+			assert.Equal("mypage2.md", p.Pages()[0].File().LogicalName())
+			assert.Equal("mypage3.md", p.Pages()[1].File().LogicalName())
 			home := p.Parent()
 			assert.True(home.IsHome())
 			assert.Len(p.Sections(), 0)
@@ -185,30 +187,31 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 			assert.True(active)
 			assert.Equal(p, p.FirstSection())
 		}},
-		{"l1", func(p *Page) {
-			assert.Equal("L1s", p.title)
-			assert.Len(p.Pages, 2)
+		{"l1", func(assert *require.Assertions, p page.Page) {
+			assert.Equal("L1s", p.Title())
+			assert.Len(p.Pages(), 2)
 			assert.True(p.Parent().IsHome())
 			assert.Len(p.Sections(), 2)
 		}},
-		{"l1,l2", func(p *Page) {
-			assert.Equal("T2_-1", p.title)
-			assert.Len(p.Pages, 3)
-			assert.Equal(p, p.Pages[0].Parent())
-			assert.Equal("L1s", p.Parent().title)
-			assert.Equal("/l1/l2/", p.URLPath.URL)
+		{"l1,l2", func(assert *require.Assertions, p page.Page) {
+			assert.Equal("T2_-1", p.Title())
+			assert.Len(p.Pages(), 3)
+			assert.Equal(p, p.Pages()[0].Parent())
+			assert.Equal("L1s", p.Parent().Title())
 			assert.Equal("/l1/l2/", p.RelPermalink())
 			assert.Len(p.Sections(), 1)
 
-			for _, child := range p.Pages {
+			for _, child := range p.Pages() {
+
 				assert.Equal(p, child.CurrentSection())
 				active, err := child.InSection(p)
 				assert.NoError(err)
+
 				assert.True(active)
 				active, err = p.InSection(child)
 				assert.NoError(err)
 				assert.True(active)
-				active, err = p.InSection(p.s.getPage(KindHome))
+				active, err = p.InSection(getPage(p, "/"))
 				assert.NoError(err)
 				assert.False(active)
 
@@ -227,25 +230,25 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 				assert.True(isDescendant)
 			}
 
-			assert.Equal(p, p.CurrentSection())
+			assert.True(p.Eq(p.CurrentSection()))
 
 		}},
-		{"l1,l2_2", func(p *Page) {
-			assert.Equal("T22_-1", p.title)
-			assert.Len(p.Pages, 2)
-			assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].Path())
-			assert.Equal("L1s", p.Parent().title)
+		{"l1,l2_2", func(assert *require.Assertions, p page.Page) {
+			assert.Equal("T22_-1", p.Title())
+			assert.Len(p.Pages(), 2)
+			assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages()[0].File().Path())
+			assert.Equal("L1s", p.Parent().Title())
 			assert.Len(p.Sections(), 0)
 		}},
-		{"l1,l2,l3", func(p *Page) {
-			var nilp *Page
+		{"l1,l2,l3", func(assert *require.Assertions, p page.Page) {
+			nilp, _ := p.GetPage("this/does/not/exist")
 
-			assert.Equal("T3_-1", p.title)
-			assert.Len(p.Pages, 2)
-			assert.Equal("T2_-1", p.Parent().title)
+			assert.Equal("T3_-1", p.Title())
+			assert.Len(p.Pages(), 2)
+			assert.Equal("T2_-1", p.Parent().Title())
 			assert.Len(p.Sections(), 0)
 
-			l1 := p.s.getPage(KindSection, "l1")
+			l1 := getPage(p, "/l1")
 			isDescendant, err := l1.IsDescendant(p)
 			assert.NoError(err)
 			assert.False(isDescendant)
@@ -274,32 +277,35 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 			assert.False(isAncestor)
 
 		}},
-		{"perm a,link", func(p *Page) {
-			assert.Equal("T9_-1", p.title)
+		{"perm a,link", func(assert *require.Assertions, p page.Page) {
+			assert.Equal("T9_-1", p.Title())
 			assert.Equal("/perm-a/link/", p.RelPermalink())
-			assert.Len(p.Pages, 4)
-			first := p.Pages[0]
+			assert.Len(p.Pages(), 4)
+			first := p.Pages()[0]
 			assert.Equal("/perm-a/link/t1_1/", first.RelPermalink())
 			th.assertFileContent("public/perm-a/link/t1_1/index.html", "Single|T1_1")
 
-			last := p.Pages[3]
+			last := p.Pages()[3]
 			assert.Equal("/perm-a/link/t1_5/", last.RelPermalink())
 
 		}},
 	}
 
-	home := s.getPage(KindHome)
+	home := s.getPage(page.KindHome)
 
 	for _, test := range tests {
-		sections := strings.Split(test.sections, ",")
-		p := s.getPage(KindSection, sections...)
-		assert.NotNil(p, fmt.Sprint(sections))
-
-		if p.Pages != nil {
-			assert.Equal(p.Pages, p.data["Pages"])
-		}
-		assert.NotNil(p.Parent(), fmt.Sprintf("Parent nil: %q", test.sections))
-		test.verify(p)
+		t.Run(fmt.Sprintf("sections %s", test.sections), func(t *testing.T) {
+			assert := require.New(t)
+			sections := strings.Split(test.sections, ",")
+			p := s.getPage(page.KindSection, sections...)
+			assert.NotNil(p, fmt.Sprint(sections))
+
+			if p.Pages() != nil {
+				assert.Equal(p.Pages(), p.Data().(page.Data).Pages())
+			}
+			assert.NotNil(p.Parent(), fmt.Sprintf("Parent nil: %q", test.sections))
+			test.verify(assert, p)
+		})
 	}
 
 	assert.NotNil(home)
@@ -307,7 +313,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 	assert.Len(home.Sections(), 9)
 	assert.Equal(home.Sections(), s.Info.Sections())
 
-	rootPage := s.getPage(KindPage, "mypage.md")
+	rootPage := s.getPage(page.KindPage, "mypage.md")
 	assert.NotNil(rootPage)
 	assert.True(rootPage.Parent().IsHome())
 
@@ -317,7 +323,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 	// If we later decide to do something about this, we will have to do some normalization in
 	// getPage.
 	// TODO(bep)
-	sectionWithSpace := s.getPage(KindSection, "Spaces in Section")
+	sectionWithSpace := s.getPage(page.KindSection, "Spaces in Section")
 	require.NotNil(t, sectionWithSpace)
 	require.Equal(t, "/spaces-in-section/", sectionWithSpace.RelPermalink())
 
diff --git a/hugolib/site_test.go b/hugolib/site_test.go
index bf46c313abe..98fe1ff4f8c 100644
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@ package hugolib
 
 import (
 	"fmt"
+	"os"
 	"path/filepath"
 	"strings"
 	"testing"
@@ -24,6 +25,7 @@ import (
 	"github.com/gohugoio/hugo/helpers"
 
 	"github.com/gohugoio/hugo/deps"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -77,13 +79,13 @@ func TestDraftAndFutureRender(t *testing.T) {
 
 	// Testing Defaults.. Only draft:true and publishDate in the past should be rendered
 	s := siteSetup(t)
-	if len(s.RegularPages) != 1 {
+	if len(s.RegularPages()) != 1 {
 		t.Fatal("Draft or Future dated content published unexpectedly")
 	}
 
 	// only publishDate in the past should be rendered
 	s = siteSetup(t, "buildDrafts", true)
-	if len(s.RegularPages) != 2 {
+	if len(s.RegularPages()) != 2 {
 		t.Fatal("Future Dated Posts published unexpectedly")
 	}
 
@@ -92,7 +94,7 @@ func TestDraftAndFutureRender(t *testing.T) {
 		"buildDrafts", false,
 		"buildFuture", true)
 
-	if len(s.RegularPages) != 2 {
+	if len(s.RegularPages()) != 2 {
 		t.Fatal("Draft posts published unexpectedly")
 	}
 
@@ -101,7 +103,7 @@ func TestDraftAndFutureRender(t *testing.T) {
 		"buildDrafts", true,
 		"buildFuture", true)
 
-	if len(s.RegularPages) != 4 {
+	if len(s.RegularPages()) != 4 {
 		t.Fatal("Drafts or Future posts not included as expected")
 	}
 
@@ -128,17 +130,17 @@ func TestFutureExpirationRender(t *testing.T) {
 
 	s := siteSetup(t)
 
-	if len(s.AllPages) != 1 {
-		if len(s.RegularPages) > 1 {
+	if len(s.AllPages()) != 1 {
+		if len(s.RegularPages()) > 1 {
 			t.Fatal("Expired content published unexpectedly")
 		}
 
-		if len(s.RegularPages) < 1 {
+		if len(s.RegularPages()) < 1 {
 			t.Fatal("Valid content expired unexpectedly")
 		}
 	}
 
-	if s.AllPages[0].title == "doc2" {
+	if s.AllPages()[0].Title() == "doc2" {
 		t.Fatal("Expired content published unexpectedly")
 	}
 }
@@ -156,8 +158,8 @@ func TestLastChange(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.False(t, s.Info.LastChange.IsZero(), "Site.LastChange is zero")
-	require.Equal(t, 2017, s.Info.LastChange.Year(), "Site.LastChange should be set to the page with latest Lastmod (year 2017)")
+	require.False(t, s.Info.LastChange().IsZero(), "Site.LastChange is zero")
+	require.Equal(t, 2017, s.Info.LastChange().Year(), "Site.LastChange should be set to the page with latest Lastmod (year 2017)")
 }
 
 // Issue #_index
@@ -170,7 +172,7 @@ func TestPageWithUnderScoreIndexInFilename(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	require.Len(t, s.RegularPages, 1)
+	require.Len(t, s.RegularPages(), 1)
 
 }
 
@@ -255,7 +257,7 @@ THE END.`, refShortcode),
 			WithTemplate: createWithTemplateFromNameValues("_default/single.html", "{{.Content}}")},
 		BuildCfg{})
 
-	require.Len(t, s.RegularPages, 4)
+	require.Len(t, s.RegularPages(), 4)
 
 	th := testHelper{s.Cfg, s.Fs, t}
 
@@ -328,13 +330,13 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
 		{filepath.FromSlash("public/index.html"), "Home Sweet Home."},
 		{filepath.FromSlash(expectedPagePath), "\n\n<h1 id=\"title\">title</h1>\n\n<p>some <em>content</em></p>\n"},
 		{filepath.FromSlash("public/404.html"), "Page Not Found."},
-		{filepath.FromSlash("public/index.xml"), "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n<root>RSS</root>"},
-		{filepath.FromSlash("public/sitemap.xml"), "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n<root>SITEMAP</root>"},
+		{filepath.FromSlash("public/index.xml"), "<root>RSS</root>"},
+		{filepath.FromSlash("public/sitemap.xml"), "<root>SITEMAP</root>"},
 		// Issue #1923
 		{filepath.FromSlash("public/ugly.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>doc2 <em>content</em></p>\n"},
 	}
 
-	for _, p := range s.RegularPages {
+	for _, p := range s.RegularPages() {
 		assert.False(t, p.IsHome())
 	}
 
@@ -406,7 +408,7 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
 	}
 
 	writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
-	writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{.Title}}")
+	writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{ .Kind }}|{{.Title}}")
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
@@ -491,6 +493,7 @@ func TestSkipRender(t *testing.T) {
 	for _, test := range tests {
 		file, err := fs.Destination.Open(test.doc)
 		if err != nil {
+			helpers.PrintFs(fs.Destination, "public", os.Stdout)
 			t.Fatalf("Did not find %s in target.", test.doc)
 		}
 
@@ -610,40 +613,40 @@ func TestOrderedPages(t *testing.T) {
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	if s.getPage(KindSection, "sect").Pages[1].title != "Three" || s.getPage(KindSection, "sect").Pages[2].title != "Four" {
+	if s.getPage(page.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPage(page.KindSection, "sect").Pages()[2].Title() != "Four" {
 		t.Error("Pages in unexpected order.")
 	}
 
-	bydate := s.RegularPages.ByDate()
+	bydate := s.RegularPages().ByDate()
 
-	if bydate[0].title != "One" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].title)
+	if bydate[0].Title() != "One" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title())
 	}
 
 	rev := bydate.Reverse()
-	if rev[0].title != "Three" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].title)
+	if rev[0].Title() != "Three" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title())
 	}
 
-	bypubdate := s.RegularPages.ByPublishDate()
+	bypubdate := s.RegularPages().ByPublishDate()
 
-	if bypubdate[0].title != "One" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].title)
+	if bypubdate[0].Title() != "One" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title())
 	}
 
 	rbypubdate := bypubdate.Reverse()
-	if rbypubdate[0].title != "Three" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].title)
+	if rbypubdate[0].Title() != "Three" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title())
 	}
 
-	bylength := s.RegularPages.ByLength()
-	if bylength[0].title != "One" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].title)
+	bylength := s.RegularPages().ByLength()
+	if bylength[0].Title() != "One" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title())
 	}
 
 	rbylength := bylength.Reverse()
-	if rbylength[0].title != "Four" {
-		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].title)
+	if rbylength[0].Title() != "Four" {
+		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].Title())
 	}
 }
 
@@ -668,7 +671,7 @@ func TestGroupedPages(t *testing.T) {
 	writeSourcesToSource(t, "content", fs, groupedSources...)
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	rbysection, err := s.RegularPages.GroupBy("Section", "desc")
+	rbysection, err := s.RegularPages().GroupBy("Section", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -682,14 +685,14 @@ func TestGroupedPages(t *testing.T) {
 	if rbysection[2].Key != "sect1" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect1", rbysection[2].Key)
 	}
-	if rbysection[0].Pages[0].title != "Four" {
-		t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].title)
+	if rbysection[0].Pages[0].Title() != "Four" {
+		t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].Title())
 	}
 	if len(rbysection[2].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))
 	}
 
-	bytype, err := s.RegularPages.GroupBy("Type", "asc")
+	bytype, err := s.RegularPages().GroupBy("Type", "asc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -702,14 +705,14 @@ func TestGroupedPages(t *testing.T) {
 	if bytype[2].Key != "sect3" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect3", bytype[2].Key)
 	}
-	if bytype[2].Pages[0].title != "Four" {
-		t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].title)
+	if bytype[2].Pages[0].Title() != "Four" {
+		t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].Title())
 	}
 	if len(bytype[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages))
 	}
 
-	bydate, err := s.RegularPages.GroupByDate("2006-01", "asc")
+	bydate, err := s.RegularPages().GroupByDate("2006-01", "asc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -720,7 +723,7 @@ func TestGroupedPages(t *testing.T) {
 		t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "2012-01", bydate[1].Key)
 	}
 
-	bypubdate, err := s.RegularPages.GroupByPublishDate("2006")
+	bypubdate, err := s.RegularPages().GroupByPublishDate("2006")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -730,14 +733,14 @@ func TestGroupedPages(t *testing.T) {
 	if bypubdate[1].Key != "0001" {
 		t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "0001", bypubdate[1].Key)
 	}
-	if bypubdate[0].Pages[0].title != "Three" {
-		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].title)
+	if bypubdate[0].Pages[0].Title() != "Three" {
+		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].Title())
 	}
 	if len(bypubdate[0].Pages) != 3 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))
 	}
 
-	byparam, err := s.RegularPages.GroupByParam("my_param", "desc")
+	byparam, err := s.RegularPages().GroupByParam("my_param", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -750,19 +753,19 @@ func TestGroupedPages(t *testing.T) {
 	if byparam[2].Key != "bar" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "bar", byparam[2].Key)
 	}
-	if byparam[2].Pages[0].title != "Three" {
-		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].title)
+	if byparam[2].Pages[0].Title() != "Three" {
+		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].Title())
 	}
 	if len(byparam[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))
 	}
 
-	_, err = s.RegularPages.GroupByParam("not_exist")
+	_, err = s.RegularPages().GroupByParam("not_exist")
 	if err == nil {
 		t.Errorf("GroupByParam didn't return an expected error")
 	}
 
-	byOnlyOneParam, err := s.RegularPages.GroupByParam("only_one")
+	byOnlyOneParam, err := s.RegularPages().GroupByParam("only_one")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -773,7 +776,7 @@ func TestGroupedPages(t *testing.T) {
 		t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key)
 	}
 
-	byParamDate, err := s.RegularPages.GroupByParamDate("my_date", "2006-01")
+	byParamDate, err := s.RegularPages().GroupByParamDate("my_date", "2006-01")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -783,8 +786,8 @@ func TestGroupedPages(t *testing.T) {
 	if byParamDate[1].Key != "1979-05" {
 		t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "1979-05", byParamDate[1].Key)
 	}
-	if byParamDate[1].Pages[0].title != "One" {
-		t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].title)
+	if byParamDate[1].Pages[0].Title() != "One" {
+		t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].Title())
 	}
 	if len(byParamDate[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byParamDate[2].Pages))
@@ -840,16 +843,16 @@ func TestWeightedTaxonomies(t *testing.T) {
 	writeSourcesToSource(t, "content", fs, sources...)
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	if s.Taxonomies["tags"]["a"][0].Page.title != "foo" {
-		t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.title)
+	if s.Taxonomies["tags"]["a"][0].Page.Title() != "foo" {
+		t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title())
 	}
 
-	if s.Taxonomies["categories"]["d"][0].Page.title != "bar" {
-		t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.title)
+	if s.Taxonomies["categories"]["d"][0].Page.Title() != "bar" {
+		t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.Title())
 	}
 
-	if s.Taxonomies["categories"]["e"][0].Page.title != "bza" {
-		t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.title)
+	if s.Taxonomies["categories"]["e"][0].Page.Title() != "bza" {
+		t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.Title())
 	}
 }
 
@@ -897,7 +900,7 @@ func TestRefLinking(t *testing.T) {
 	t.Parallel()
 	site := setupLinkingMockSite(t)
 
-	currentPage := site.getPage(KindPage, "level2/level3/start.md")
+	currentPage := site.getPage(page.KindPage, "level2/level3/start.md")
 	if currentPage == nil {
 		t.Fatalf("failed to find current page in site")
 	}
@@ -952,8 +955,8 @@ func TestRefLinking(t *testing.T) {
 	// TODO: and then the failure cases.
 }
 
-func checkLinkCase(site *Site, link string, currentPage *Page, relative bool, outputFormat string, expected string, t *testing.T, i int) {
+func checkLinkCase(site *Site, link string, currentPage page.Page, relative bool, outputFormat string, expected string, t *testing.T, i int) {
 	if out, err := site.refLink(link, currentPage, relative, outputFormat); err != nil || out != expected {
-		t.Errorf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.absoluteSourceRef(), expected, out, err)
+		t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Path(), expected, out, err)
 	}
 }
diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go
index 5b9d19e0dd1..10aa3bb282a 100644
--- a/hugolib/site_url_test.go
+++ b/hugolib/site_url_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,6 +18,8 @@ import (
 	"path/filepath"
 	"testing"
 
+	"github.com/gohugoio/hugo/resources/page"
+
 	"html/template"
 
 	"github.com/gohugoio/hugo/deps"
@@ -115,14 +117,14 @@ Do not go gentle into that good night.
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	assert.Len(s.RegularPages, 2)
+	assert.Len(s.RegularPages(), 2)
 
-	notUgly := s.getPage(KindPage, "sect1/p1.md")
+	notUgly := s.getPage(page.KindPage, "sect1/p1.md")
 	assert.NotNil(notUgly)
 	assert.Equal("sect1", notUgly.Section())
 	assert.Equal("/sect1/p1/", notUgly.RelPermalink())
 
-	ugly := s.getPage(KindPage, "sect2/p2.md")
+	ugly := s.getPage(page.KindPage, "sect2/p2.md")
 	assert.NotNil(ugly)
 	assert.Equal("sect2", ugly.Section())
 	assert.Equal("/sect2/p2.html", ugly.RelPermalink())
@@ -173,9 +175,9 @@ Do not go gentle into that good night.
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	assert.Len(s.RegularPages, 10)
+	assert.Len(s.RegularPages(), 10)
 
-	sect1 := s.getPage(KindSection, "sect1")
+	sect1 := s.getPage(page.KindSection, "sect1")
 	assert.NotNil(sect1)
 	assert.Equal("/ss1/", sect1.RelPermalink())
 	th.assertFileContent(filepath.Join("public", "ss1", "index.html"), "P1|URL: /ss1/|Next: /ss1/page/2/")
diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go
index 002f772d83f..cab13d356bc 100644
--- a/hugolib/sitemap_test.go
+++ b/hugolib/sitemap_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -18,10 +18,10 @@ import (
 
 	"reflect"
 
-	"github.com/stretchr/testify/require"
-
+	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/deps"
 	"github.com/gohugoio/hugo/tpl"
+	"github.com/stretchr/testify/require"
 )
 
 const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
@@ -86,14 +86,14 @@ func doTestSitemapOutput(t *testing.T, internal bool) {
 
 func TestParseSitemap(t *testing.T) {
 	t.Parallel()
-	expected := Sitemap{Priority: 3.0, Filename: "doo.xml", ChangeFreq: "3"}
+	expected := config.Sitemap{Priority: 3.0, Filename: "doo.xml", ChangeFreq: "3"}
 	input := map[string]interface{}{
 		"changefreq": "3",
 		"priority":   3.0,
 		"filename":   "doo.xml",
 		"unknown":    "ignore",
 	}
-	result := parseSitemap(input)
+	result := config.DecodeSitemap(config.Sitemap{}, input)
 
 	if !reflect.DeepEqual(expected, result) {
 		t.Errorf("Got \n%v expected \n%v", result, expected)
diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go
index c8447d1bae0..9d9e4f9ecd1 100644
--- a/hugolib/taxonomy.go
+++ b/hugolib/taxonomy.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -15,7 +15,11 @@ package hugolib
 
 import (
 	"fmt"
+	"path"
 	"sort"
+
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/resources/resource"
 )
 
 // The TaxonomyList is a list of all taxonomies and their values
@@ -28,44 +32,30 @@ func (tl TaxonomyList) String() string {
 
 // A Taxonomy is a map of keywords to a list of pages.
 // For example
-//    TagTaxonomy['technology'] = WeightedPages
-//    TagTaxonomy['go']  =  WeightedPages2
-type Taxonomy map[string]WeightedPages
-
-// WeightedPages is a list of Pages with their corresponding (and relative) weight
-// [{Weight: 30, Page: *1}, {Weight: 40, Page: *2}]
-type WeightedPages []WeightedPage
-
-// A WeightedPage is a Page with a weight.
-type WeightedPage struct {
-	Weight int
-	*Page
-}
-
-func (w WeightedPage) String() string {
-	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.title)
-}
+//    TagTaxonomy['technology'] = page.WeightedPages
+//    TagTaxonomy['go']  =  page.WeightedPages
+type Taxonomy map[string]page.WeightedPages
 
 // OrderedTaxonomy is another representation of an Taxonomy using an array rather than a map.
 // Important because you can't order a map.
 type OrderedTaxonomy []OrderedTaxonomyEntry
 
 // OrderedTaxonomyEntry is similar to an element of a Taxonomy, but with the key embedded (as name)
-// e.g:  {Name: Technology, WeightedPages: Taxonomyedpages}
+// e.g:  {Name: Technology, WeightedPages: TaxonomyPages}
 type OrderedTaxonomyEntry struct {
-	Name          string
-	WeightedPages WeightedPages
+	Name string
+	page.WeightedPages
 }
 
 // Get the weighted pages for the given key.
-func (i Taxonomy) Get(key string) WeightedPages {
+func (i Taxonomy) Get(key string) page.WeightedPages {
 	return i[key]
 }
 
 // Count the weighted pages for the given key.
 func (i Taxonomy) Count(key string) int { return len(i[key]) }
 
-func (i Taxonomy) add(key string, w WeightedPage) {
+func (i Taxonomy) add(key string, w page.WeightedPage) {
 	i[key] = append(i[key], w)
 }
 
@@ -110,7 +100,7 @@ func (i Taxonomy) ByCount() OrderedTaxonomy {
 }
 
 // Pages returns the Pages for this taxonomy.
-func (ie OrderedTaxonomyEntry) Pages() Pages {
+func (ie OrderedTaxonomyEntry) Pages() page.Pages {
 	return ie.WeightedPages.Pages()
 }
 
@@ -165,60 +155,81 @@ func (s *orderedTaxonomySorter) Less(i, j int) bool {
 	return s.by(&s.taxonomy[i], &s.taxonomy[j])
 }
 
-// Pages returns the Pages in this weighted page set.
-func (wp WeightedPages) Pages() Pages {
-	pages := make(Pages, len(wp))
-	for i := range wp {
-		pages[i] = wp[i].Page
-	}
-	return pages
-}
-
-// Prev returns the previous Page relative to the given Page in
-// this weighted page set.
-func (wp WeightedPages) Prev(cur *Page) *Page {
-	for x, c := range wp {
-		if c.Page.UniqueID() == cur.UniqueID() {
-			if x == 0 {
-				return wp[len(wp)-1].Page
-			}
-			return wp[x-1].Page
-		}
-	}
-	return nil
+// taxonomyNodeInfo stores additional metadata about a taxonomy.
+type taxonomyNodeInfo struct {
+	plural string
+
+	// Maps "tags" to "tag".
+	singular string
+
+	// The term key as used in the taxonomy map, e.g "tag1".
+	// The value is normalized for paths, but may or may not be lowercased
+	// depending on the disablePathToLower setting.
+	termKey string
+
+	// The original, unedited term name. Useful for titles etc.
+	term string
+
+	dates resource.Dates
+
+	parent *taxonomyNodeInfo
+
+	// The owning page: either of Kind taxonomyTerm (the parent) or Kind taxonomy.
+	owner page.Page
 }
 
-// Next returns the next Page relative to the given Page in
-// this weighted page set.
-func (wp WeightedPages) Next(cur *Page) *Page {
-	for x, c := range wp {
-		if c.Page.UniqueID() == cur.UniqueID() {
-			if x < len(wp)-1 {
-				return wp[x+1].Page
-			}
-			return wp[0].Page
-		}
+func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) {
+
+	// Select the latest dates
+	t.dates.UpdateDateAndLastmodIfAfter(p)
+}
+
+func (t *taxonomyNodeInfo) TransferValues(p *pageState) {
+	t.owner = p
+	if p.Lastmod().IsZero() && p.Date().IsZero() {
+		p.m.Dates.UpdateDateAndLastmodIfAfter(t.dates)
 	}
-	return nil
 }
 
-func (wp WeightedPages) Len() int      { return len(wp) }
-func (wp WeightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }
+// getOwner is passed as a callback to the child nodes.
+func (t *taxonomyNodeInfo) getOwner() page.Page {
+	return t.owner
+}
 
-// Sort stable sorts this weighted page set.
-func (wp WeightedPages) Sort() { sort.Stable(wp) }
+// Maps either plural or plural/term to a taxonomy node.
+// TODO(bep) consolidate somehow with s.Taxonomies
+type taxonomyNodeInfos map[string]*taxonomyNodeInfo
 
-// Count returns the number of pages in this weighted page set.
-func (wp WeightedPages) Count() int { return len(wp) }
+func (t taxonomyNodeInfos) key(parts ...string) string {
+	return path.Join(parts...)
+}
 
-func (wp WeightedPages) Less(i, j int) bool {
-	if wp[i].Weight == wp[j].Weight {
-		if wp[i].Page.Date.Equal(wp[j].Page.Date) {
-			return wp[i].Page.title < wp[j].Page.title
-		}
-		return wp[i].Page.Date.After(wp[i].Page.Date)
+func (t taxonomyNodeInfos) GetOrCreate(plural, termKey, term string) *taxonomyNodeInfo {
+	key := t.key(plural, termKey)
+
+	n, found := t[key]
+	if found {
+		return n
 	}
-	return wp[i].Weight < wp[j].Weight
+
+	n = &taxonomyNodeInfo{
+		plural:  plural,
+		termKey: termKey,
+		term:    term,
+	}
+
+	t[key] = n
+
+	return n
 }
 
-// TODO mimic PagesSorter for WeightedPages
+func (t taxonomyNodeInfos) Get(sections ...string) *taxonomyNodeInfo {
+	key := t.key(sections...)
+
+	n, found := t[key]
+	if found {
+		return n
+	}
+
+	return nil
+}
diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go
index 1ae9fae228f..2501ed2e4de 100644
--- a/hugolib/taxonomy_test.go
+++ b/hugolib/taxonomy_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -16,6 +16,9 @@ package hugolib
 import (
 	"fmt"
 	"path/filepath"
+
+	"github.com/gohugoio/hugo/resources/page"
+
 	"reflect"
 	"strings"
 	"testing"
@@ -25,7 +28,7 @@ import (
 	"github.com/gohugoio/hugo/deps"
 )
 
-func TestByCountOrderOfTaxonomies(t *testing.T) {
+func TestTaxonomiesCountOrder(t *testing.T) {
 	t.Parallel()
 	taxonomies := make(map[string]string)
 
@@ -36,37 +39,42 @@ func TestByCountOrderOfTaxonomies(t *testing.T) {
 
 	cfg.Set("taxonomies", taxonomies)
 
-	writeSource(t, fs, filepath.Join("content", "page.md"), pageYamlWithTaxonomiesA)
+	const pageContent = `---
+tags: ['a', 'B', 'c']
+categories: 'd'
+---
+YAML frontmatter with tags and categories taxonomy.`
+
+	writeSource(t, fs, filepath.Join("content", "page.md"), pageContent)
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
 	st := make([]string, 0)
 	for _, t := range s.Taxonomies["tags"].ByCount() {
-		st = append(st, t.Name)
+		st = append(st, t.Page().Title()+":"+t.Name)
 	}
 
-	if !reflect.DeepEqual(st, []string{"a", "b", "c"}) {
-		t.Fatalf("ordered taxonomies do not match [a, b, c].  Got: %s", st)
+	expect := []string{"a:a", "B:b", "c:c"}
+
+	if !reflect.DeepEqual(st, expect) {
+		t.Fatalf("ordered taxonomies mismatch, expected\n%v\ngot\n%q", expect, st)
 	}
 }
 
 //
 func TestTaxonomiesWithAndWithoutContentFile(t *testing.T) {
 	for _, uglyURLs := range []bool{false, true} {
-		for _, preserveTaxonomyNames := range []bool{false, true} {
-			t.Run(fmt.Sprintf("uglyURLs=%t,preserveTaxonomyNames=%t", uglyURLs, preserveTaxonomyNames), func(t *testing.T) {
-				doTestTaxonomiesWithAndWithoutContentFile(t, preserveTaxonomyNames, uglyURLs)
-			})
-		}
+		t.Run(fmt.Sprintf("uglyURLs=%t", uglyURLs), func(t *testing.T) {
+			doTestTaxonomiesWithAndWithoutContentFile(t, uglyURLs)
+		})
 	}
 }
 
-func doTestTaxonomiesWithAndWithoutContentFile(t *testing.T, preserveTaxonomyNames, uglyURLs bool) {
+func doTestTaxonomiesWithAndWithoutContentFile(t *testing.T, uglyURLs bool) {
 	t.Parallel()
 
 	siteConfig := `
 baseURL = "http://example.com/blog"
-preserveTaxonomyNames = %t
 uglyURLs = %t
 paginate = 1
 defaultContentLanguage = "en"
@@ -94,23 +102,17 @@ permalinkeds:
 # Doc
 `
 
-	siteConfig = fmt.Sprintf(siteConfig, preserveTaxonomyNames, uglyURLs)
+	siteConfig = fmt.Sprintf(siteConfig, uglyURLs)
 
 	th, h := newTestSitesFromConfigWithDefaultTemplates(t, siteConfig)
 	require.Len(t, h.Sites, 1)
 
 	fs := th.Fs
 
-	if preserveTaxonomyNames {
-		writeSource(t, fs, "content/p1.md", fmt.Sprintf(pageTemplate, "t1/c1", "- tag1", "- cat1", "- o1", "- pl1"))
-	} else {
-		// Check lower-casing of tags
-		writeSource(t, fs, "content/p1.md", fmt.Sprintf(pageTemplate, "t1/c1", "- Tag1", "- cAt1", "- o1", "- pl1"))
-
-	}
-	writeSource(t, fs, "content/p2.md", fmt.Sprintf(pageTemplate, "t2/c1", "- tag2", "- cat1", "- o1", "- pl1"))
-	writeSource(t, fs, "content/p3.md", fmt.Sprintf(pageTemplate, "t2/c12", "- tag2", "- cat2", "- o1", "- pl1"))
-	writeSource(t, fs, "content/p4.md", fmt.Sprintf(pageTemplate, "Hello World", "", "", "- \"Hello Hugo world\"", "- pl1"))
+	writeSource(t, fs, "content/p1.md", fmt.Sprintf(pageTemplate, "t1/c1", "- Tag1", "- cAt1", "- o1", "- Pl1"))
+	writeSource(t, fs, "content/p2.md", fmt.Sprintf(pageTemplate, "t2/c1", "- tag2", "- cAt1", "- o1", "- Pl1"))
+	writeSource(t, fs, "content/p3.md", fmt.Sprintf(pageTemplate, "t2/c12", "- tag2", "- cat2", "- o1", "- Pl1"))
+	writeSource(t, fs, "content/p4.md", fmt.Sprintf(pageTemplate, "Hello World", "", "", "- \"Hello Hugo world\"", "- Pl1"))
 
 	writeNewContentFile(t, fs.Source, "Category Terms", "2017-01-01", "content/categories/_index.md", 10)
 	writeNewContentFile(t, fs.Source, "Tag1 List", "2017-01-01", "content/tags/Tag1/_index.md", 10)
@@ -133,45 +135,29 @@ permalinkeds:
 	}
 
 	// 1.
-	if preserveTaxonomyNames {
-		th.assertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "cat1")
-	} else {
-		th.assertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "Cat1")
-	}
-
+	th.assertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "cAt1")
 	th.assertFileContent(pathFunc("public/categories/index.html"), "Terms List", "Category Terms")
 
 	// 2.
-	if preserveTaxonomyNames {
-		th.assertFileContent(pathFunc("public/tags/tag2/index.html"), "List", "tag2")
-	} else {
-		th.assertFileContent(pathFunc("public/tags/tag2/index.html"), "List", "Tag2")
-	}
+	th.assertFileContent(pathFunc("public/tags/tag2/index.html"), "List", "tag2")
 	th.assertFileContent(pathFunc("public/tags/tag1/index.html"), "List", "Tag1")
 	th.assertFileContent(pathFunc("public/tags/index.html"), "Terms List", "Tags")
 
 	// 3.
-	if preserveTaxonomyNames {
-		th.assertFileContent(pathFunc("public/others/o1/index.html"), "List", "o1")
-	} else {
-		th.assertFileContent(pathFunc("public/others/o1/index.html"), "List", "O1")
-	}
+	th.assertFileContent(pathFunc("public/others/o1/index.html"), "List", "o1")
 	th.assertFileContent(pathFunc("public/others/index.html"), "Terms List", "Others")
 
 	// 4.
-	if preserveTaxonomyNames {
-		th.assertFileContent(pathFunc("public/perma/pl1/index.html"), "List", "pl1")
-	} else {
-		th.assertFileContent(pathFunc("public/perma/pl1/index.html"), "List", "Pl1")
-	}
+	th.assertFileContent(pathFunc("public/perma/pl1/index.html"), "List", "Pl1")
+
 	// This looks kind of funky, but the taxonomy terms do not have a permalinks definition,
 	// for good reasons.
 	th.assertFileContent(pathFunc("public/permalinkeds/index.html"), "Terms List", "Permalinkeds")
 
 	s := h.Sites[0]
 
-	// Make sure that each KindTaxonomyTerm page has an appropriate number
-	// of KindTaxonomy pages in its Pages slice.
+	// Make sure that each page.KindTaxonomyTerm page has an appropriate number
+	// of page.KindTaxonomy pages in its Pages slice.
 	taxonomyTermPageCounts := map[string]int{
 		"tags":         2,
 		"categories":   2,
@@ -181,16 +167,16 @@ permalinkeds:
 	}
 
 	for taxonomy, count := range taxonomyTermPageCounts {
-		term := s.getPage(KindTaxonomyTerm, taxonomy)
+		term := s.getPage(page.KindTaxonomyTerm, taxonomy)
 		require.NotNil(t, term)
-		require.Len(t, term.Pages, count)
+		require.Len(t, term.Pages(), count)
 
-		for _, page := range term.Pages {
-			require.Equal(t, KindTaxonomy, page.Kind)
+		for _, p := range term.Pages() {
+			require.Equal(t, page.KindTaxonomy, p.Kind())
 		}
 	}
 
-	cat1 := s.getPage(KindTaxonomy, "categories", "cat1")
+	cat1 := s.getPage(page.KindTaxonomy, "categories", "cat1")
 	require.NotNil(t, cat1)
 	if uglyURLs {
 		require.Equal(t, "/blog/categories/cat1.html", cat1.RelPermalink())
@@ -198,8 +184,8 @@ permalinkeds:
 		require.Equal(t, "/blog/categories/cat1/", cat1.RelPermalink())
 	}
 
-	pl1 := s.getPage(KindTaxonomy, "permalinkeds", "pl1")
-	permalinkeds := s.getPage(KindTaxonomyTerm, "permalinkeds")
+	pl1 := s.getPage(page.KindTaxonomy, "permalinkeds", "pl1")
+	permalinkeds := s.getPage(page.KindTaxonomyTerm, "permalinkeds")
 	require.NotNil(t, pl1)
 	require.NotNil(t, permalinkeds)
 	if uglyURLs {
@@ -210,16 +196,9 @@ permalinkeds:
 		require.Equal(t, "/blog/permalinkeds/", permalinkeds.RelPermalink())
 	}
 
-	// Issue #3070 preserveTaxonomyNames
-	if preserveTaxonomyNames {
-		helloWorld := s.getPage(KindTaxonomy, "others", "Hello Hugo world")
-		require.NotNil(t, helloWorld)
-		require.Equal(t, "Hello Hugo world", helloWorld.title)
-	} else {
-		helloWorld := s.getPage(KindTaxonomy, "others", "hello-hugo-world")
-		require.NotNil(t, helloWorld)
-		require.Equal(t, "Hello Hugo World", helloWorld.title)
-	}
+	helloWorld := s.getPage(page.KindTaxonomy, "others", "hello-hugo-world")
+	require.NotNil(t, helloWorld)
+	require.Equal(t, "Hello Hugo world", helloWorld.Title())
 
 	// Issue #2977
 	th.assertFileContent(pathFunc("public/empties/index.html"), "Terms List", "Empties")
@@ -282,21 +261,65 @@ title: "This is S3s"
 
 	s := b.H.Sites[0]
 
-	ta := s.findPagesByKind(KindTaxonomy)
-	te := s.findPagesByKind(KindTaxonomyTerm)
+	ta := s.findPagesByKind(page.KindTaxonomy)
+	te := s.findPagesByKind(page.KindTaxonomyTerm)
 
 	assert.Equal(4, len(te))
 	assert.Equal(7, len(ta))
 
-	b.AssertFileContent("public/news/categories/a/index.html", "Taxonomy List Page 1|A|Hello|https://example.com/news/categories/a/|")
+	b.AssertFileContent("public/news/categories/a/index.html", "Taxonomy List Page 1|a|Hello|https://example.com/news/categories/a/|")
 	b.AssertFileContent("public/news/categories/b/index.html", "Taxonomy List Page 1|This is B|Hello|https://example.com/news/categories/b/|")
-	b.AssertFileContent("public/news/categories/d/e/index.html", "Taxonomy List Page 1|D/E|Hello|https://example.com/news/categories/d/e/|")
+	b.AssertFileContent("public/news/categories/d/e/index.html", "Taxonomy List Page 1|d/e|Hello|https://example.com/news/categories/d/e/|")
 	b.AssertFileContent("public/news/categories/f/g/h/index.html", "Taxonomy List Page 1|This is H|Hello|https://example.com/news/categories/f/g/h/|")
 	b.AssertFileContent("public/t1/t2/t3s/t4/t5/index.html", "Taxonomy List Page 1|This is T5|Hello|https://example.com/t1/t2/t3s/t4/t5/|")
-	b.AssertFileContent("public/t1/t2/t3s/t4/t5/t6/index.html", "Taxonomy List Page 1|T4/T5/T6|Hello|https://example.com/t1/t2/t3s/t4/t5/t6/|")
+	b.AssertFileContent("public/t1/t2/t3s/t4/t5/t6/index.html", "Taxonomy List Page 1|t4/t5/t6|Hello|https://example.com/t1/t2/t3s/t4/t5/t6/|")
 
 	b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|News/Categories|Hello|https://example.com/news/categories/|")
 	b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|T1/T2/T3s|Hello|https://example.com/t1/t2/t3s/|")
 	b.AssertFileContent("public/s1/s2/s3s/index.html", "Taxonomy Term Page 1|This is S3s|Hello|https://example.com/s1/s2/s3s/|")
 
 }
+
+// https://github.com/gohugoio/hugo/issues/5719
+func TestTaxonomiesNextGenLoops(t *testing.T) {
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	b.WithTemplatesAdded("index.html", `
+<h1>Tags</h1>
+<ul>
+    {{ range .Site.Taxonomies.tags }}
+            <li><a href="{{ .Page.Permalink }}">{{ .Page.Title }}</a> {{ .Count }}</li>
+    {{ end }}
+</ul>
+
+`)
+
+	b.WithTemplatesAdded("_default/terms.html", `
+<h1>Terms</h1>
+<ul>
+    {{ range .Data.Terms.Alphabetical }}
+            <li><a href="{{ .Page.Permalink }}">{{ .Page.Title }}</a> {{ .Count }}</li>
+    {{ end }}
+</ul>
+`)
+
+	for i := 0; i < 10; i++ {
+		b.WithContent(fmt.Sprintf("page%d.md", i+1), `
+---
+Title: "Taxonomy!"
+tags: ["Hugo Rocks!", "Rocks I say!" ]
+categories: ["This is Cool", "And new" ]
+---
+
+Content.
+		
+		`)
+	}
+
+	b.CreateSites().Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", `<li><a href="http://example.com/tags/hugo-rocks/">Hugo Rocks!</a> 10</li>`)
+	b.AssertFileContent("public/categories/index.html", `<li><a href="http://example.com/categories/this-is-cool/">This is Cool</a> 10</li>`)
+	b.AssertFileContent("public/tags/index.html", `<li><a href="http://example.com/tags/rocks-i-say/">Rocks I say!</a> 10</li>`)
+
+}
diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go
index 64d1ff96a23..9521110b329 100644
--- a/hugolib/testhelpers_test.go
+++ b/hugolib/testhelpers_test.go
@@ -14,11 +14,11 @@ import (
 	"strings"
 	"text/template"
 
-	"github.com/gohugoio/hugo/langs"
-	"github.com/sanity-io/litter"
-
+	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/deps"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/sanity-io/litter"
 	"github.com/spf13/afero"
 
 	"github.com/gohugoio/hugo/helpers"
@@ -387,6 +387,7 @@ func (s *sitesBuilder) build(cfg BuildCfg, shouldFail bool) *sitesBuilder {
 		}
 	}
 	if err != nil && !shouldFail {
+		herrors.PrintStackTrace(err)
 		s.Fatalf("Build failed: %s", err)
 	} else if err == nil && shouldFail {
 		s.Fatalf("Expected error")
@@ -418,10 +419,10 @@ date: "2018-02-28"
 			"content/sect/doc1.nn.md", contentTemplate,
 		}
 
-		listTemplateCommon = "{{ $p := .Paginator }}{{ $p.PageNumber }}|{{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}|Pager: {{ template \"_internal/pagination.html\" . }}"
+		listTemplateCommon = "{{ $p := .Paginator }}{{ $p.PageNumber }}|{{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}|Pager: {{ template \"_internal/pagination.html\" . }}|Kind: {{ .Kind }}"
 
 		defaultTemplates = []string{
-			"_default/single.html", "Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Lang}}|{{ .Content }}",
+			"_default/single.html", "Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Language.Lang}}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}|{{ .Content }}|Resources: {{ range .Resources }}{{ .MediaType }}: {{ .RelPermalink}} -- {{ end }}|Summary: {{ .Summary }}|Truncated: {{ .Truncated }}",
 			"_default/list.html", "List Page " + listTemplateCommon,
 			"index.html", "{{ $p := .Paginator }}Default Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{  .Site.Data.hugo.slogan }}|String Resource: {{ ( \"Hugo Pipes\" | resources.FromString \"text/pipes.txt\").RelPermalink  }}",
 			"index.fr.html", "{{ $p := .Paginator }}French Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{  .Site.Data.hugo.slogan }}|String Resource: {{ ( \"Hugo Pipes\" | resources.FromString \"text/pipes.txt\").RelPermalink  }}",
@@ -432,6 +433,9 @@ date: "2018-02-28"
 			// A shortcode in multiple languages
 			"shortcodes/lingo.html", "LingoDefault",
 			"shortcodes/lingo.fr.html", "LingoFrench",
+			// Special templates
+			"404.html", "404|{{ .Lang }}|{{ .Title }}",
+			"robots.txt", "robots|{{ .Lang }}|{{ .Title }}",
 		}
 
 		defaultI18n = []string{
@@ -469,18 +473,25 @@ func (s *sitesBuilder) Fatalf(format string, args ...interface{}) {
 }
 
 func Fatalf(t testing.TB, format string, args ...interface{}) {
-	trace := trace()
+	trace := stackTrace()
 	format = format + "\n%s"
 	args = append(args, trace)
 	t.Fatalf(format, args...)
 }
 
-func trace() string {
+func stackTrace() string {
 	return strings.Join(assert.CallerInfo(), "\n\r\t\t\t")
 }
 
+func (s *sitesBuilder) AssertFileContentFn(filename string, f func(s string) bool) {
+	content := s.FileContent(filename)
+	if !f(content) {
+		s.Fatalf("Assert failed for %q", filename)
+	}
+}
+
 func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) {
-	content := readDestination(s.T, s.Fs, filename)
+	content := s.FileContent(filename)
 	for _, match := range matches {
 		if !strings.Contains(content, match) {
 			s.Fatalf("No match for %q in content for %s\n%s\n%q", match, filename, content, content)
@@ -488,6 +499,10 @@ func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) {
 	}
 }
 
+func (s *sitesBuilder) FileContent(filename string) string {
+	return readDestination(s.T, s.Fs, filename)
+}
+
 func (s *sitesBuilder) AssertObject(expected string, object interface{}) {
 	got := s.dumper.Sdump(object)
 	expected = strings.TrimSpace(expected)
@@ -502,7 +517,7 @@ func (s *sitesBuilder) AssertObject(expected string, object interface{}) {
 func (s *sitesBuilder) AssertFileContentRe(filename string, matches ...string) {
 	content := readDestination(s.T, s.Fs, filename)
 	for _, match := range matches {
-		r := regexp.MustCompile(match)
+		r := regexp.MustCompile("(?s)" + match)
 		if !r.MatchString(content) {
 			s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
 		}
@@ -555,32 +570,6 @@ func (th testHelper) replaceDefaultContentLanguageValue(value string) string {
 	return value
 }
 
-func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *helpers.PathSpec {
-	l := langs.NewDefaultLanguage(v)
-	ps, _ := helpers.NewPathSpec(fs, l)
-	return ps
-}
-
-func newTestDefaultPathSpec(t *testing.T) *helpers.PathSpec {
-	v := viper.New()
-	// Easier to reason about in tests.
-	v.Set("disablePathToLower", true)
-	v.Set("contentDir", "content")
-	v.Set("dataDir", "data")
-	v.Set("i18nDir", "i18n")
-	v.Set("layoutDir", "layouts")
-	v.Set("archetypeDir", "archetypes")
-	v.Set("assetDir", "assets")
-	v.Set("resourceDir", "resources")
-	v.Set("publishDir", "public")
-	fs := hugofs.NewDefault(v)
-	ps, err := helpers.NewPathSpec(fs, v)
-	if err != nil {
-		t.Fatal(err)
-	}
-	return ps
-}
-
 func newTestCfg() (*viper.Viper, *hugofs.Fs) {
 
 	v := viper.New()
@@ -597,27 +586,6 @@ func newTestCfg() (*viper.Viper, *hugofs.Fs) {
 
 }
 
-// newTestSite creates a new site in the  English language with in-memory Fs.
-// The site will have a template system loaded and ready to use.
-// Note: This is only used in single site tests.
-func newTestSite(t testing.TB, configKeyValues ...interface{}) *Site {
-
-	cfg, fs := newTestCfg()
-
-	for i := 0; i < len(configKeyValues); i += 2 {
-		cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
-	}
-
-	d := deps.DepsCfg{Fs: fs, Cfg: cfg}
-
-	s, err := NewSiteForCfg(d)
-
-	if err != nil {
-		Fatalf(t, "Failed to create Site: %s", err)
-	}
-	return s
-}
-
 func newTestSitesFromConfig(t testing.TB, afs afero.Fs, tomlConfig string, layoutPathContentPairs ...string) (testHelper, *HugoSites) {
 	if len(layoutPathContentPairs)%2 != 0 {
 		Fatalf(t, "Layouts must be provided in pairs")
@@ -696,11 +664,28 @@ func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...[
 	}
 }
 
-func dumpPages(pages ...*Page) {
+func getPage(in page.Page, ref string) page.Page {
+	p, err := in.GetPage(ref)
+	if err != nil {
+		panic(err)
+	}
+	return p
+}
+
+func dumpPages(pages ...page.Page) {
+	fmt.Println("---------")
 	for i, p := range pages {
-		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n",
+		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s\n",
 			i+1,
-			p.Kind, p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections()))
+			p.Kind(), p.Title(), p.RelPermalink(), p.Path(), p.SectionsPath())
+	}
+}
+
+func dumpSPages(pages ...*pageState) {
+	for i, p := range pages {
+		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s\n",
+			i+1,
+			p.Kind(), p.Title(), p.RelPermalink(), p.Path(), p.SectionsPath())
 	}
 }
 
@@ -722,8 +707,8 @@ func printStringIndexes(s string) {
 		fmt.Println()
 
 	}
-
 }
+
 func isCI() bool {
 	return os.Getenv("CI") != ""
 }
@@ -731,3 +716,21 @@ func isCI() bool {
 func isGo111() bool {
 	return strings.Contains(runtime.Version(), "1.11")
 }
+
+// See https://github.com/golang/go/issues/19280
+// Not in use.
+var parallelEnabled = true
+
+func parallel(t *testing.T) {
+	if parallelEnabled {
+		t.Parallel()
+	}
+}
+
+// Useful to debug nil pointers/panics in templates.
+// Put "defer recoverStack()" in top of the failing function.
+func recoverStack() {
+	if r := recover(); r != nil {
+		fmt.Println(printStackTrace(1000))
+	}
+}
diff --git a/hugolib/translations.go b/hugolib/translations.go
index 2682363f003..072ce33e5ea 100644
--- a/hugolib/translations.go
+++ b/hugolib/translations.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -13,47 +13,41 @@
 
 package hugolib
 
-// Translations represent the other translations for a given page. The
-// string here is the language code, as affected by the `post.LANG.md`
-// filename.
-type Translations map[string]*Page
+import (
+	"github.com/gohugoio/hugo/resources/page"
+)
 
-func pagesToTranslationsMap(pages []*Page) map[string]Translations {
-	out := make(map[string]Translations)
+func pagesToTranslationsMap(sites []*Site) map[string]page.Pages {
+	out := make(map[string]page.Pages)
 
-	for _, page := range pages {
-		base := page.TranslationKey()
+	for _, s := range sites {
+		for _, p := range s.workAllPages {
+			// TranslationKey is implemented for all page types.
+			base := p.TranslationKey()
 
-		pageTranslation, present := out[base]
-		if !present {
-			pageTranslation = make(Translations)
-		}
+			pageTranslations, found := out[base]
+			if !found {
+				pageTranslations = make(page.Pages, 0)
+			}
 
-		pageLang := page.Lang()
-		if pageLang == "" {
-			continue
+			pageTranslations = append(pageTranslations, p)
+			out[base] = pageTranslations
 		}
-
-		pageTranslation[pageLang] = page
-		out[base] = pageTranslation
 	}
 
 	return out
 }
 
-func assignTranslationsToPages(allTranslations map[string]Translations, pages []*Page) {
-	for _, page := range pages {
-		page.translations = page.translations[:0]
-		base := page.TranslationKey()
-		trans, exist := allTranslations[base]
-		if !exist {
-			continue
-		}
+func assignTranslationsToPages(allTranslations map[string]page.Pages, sites []*Site) {
+	for _, s := range sites {
+		for _, p := range s.workAllPages {
+			base := p.TranslationKey()
+			translations, found := allTranslations[base]
+			if !found {
+				continue
+			}
 
-		for _, translatedPage := range trans {
-			page.translations = append(page.translations, translatedPage)
+			p.setTranslations(translations)
 		}
-
-		pageBy(languagePageSort).Sort(page.translations)
 	}
 }
diff --git a/langs/language.go b/langs/language.go
index d741b997828..14e3263aeb9 100644
--- a/langs/language.go
+++ b/langs/language.go
@@ -113,9 +113,19 @@ func NewLanguages(l ...*Language) Languages {
 	return languages
 }
 
-func (l Languages) Len() int           { return len(l) }
-func (l Languages) Less(i, j int) bool { return l[i].Weight < l[j].Weight }
-func (l Languages) Swap(i, j int)      { l[i], l[j] = l[j], l[i] }
+func (l Languages) Len() int { return len(l) }
+func (l Languages) Less(i, j int) bool {
+	wi, wj := l[i].Weight, l[j].Weight
+
+	if wi == wj {
+		return l[i].Lang < l[j].Lang
+	}
+
+	return wj == 0 || wi < wj
+
+}
+
+func (l Languages) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
 
 // Params retunrs language-specific params merged with the global params.
 func (l *Language) Params() map[string]interface{} {
diff --git a/lazy/init.go b/lazy/init.go
new file mode 100644
index 00000000000..5c1bee6095a
--- /dev/null
+++ b/lazy/init.go
@@ -0,0 +1,199 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+	"context"
+	"sync"
+	"time"
+
+	"github.com/pkg/errors"
+)
+
+// New creates a new empty Init.
+func New() *Init {
+	return &Init{}
+}
+
+// Init holds a graph of lazily initialized dependencies.
+type Init struct {
+	mu sync.Mutex
+
+	prev     *Init
+	children []*Init
+
+	init onceMore
+	out  interface{}
+	err  error
+	f    func() (interface{}, error)
+}
+
+// Add adds a func as a new child dependency.
+func (ini *Init) Add(initFn func() (interface{}, error)) *Init {
+	if ini == nil {
+		ini = New()
+	}
+	return ini.add(false, initFn)
+}
+
+// AddWithTimeout is the same as Add, but with a timeout that aborts initialization.
+func (ini *Init) AddWithTimeout(timeout time.Duration, f func(ctx context.Context) (interface{}, error)) *Init {
+	return ini.Add(func() (interface{}, error) {
+		return ini.withTimeout(timeout, f)
+	})
+}
+
+// Branch creates a new dependency branch based on an existing one and adds
+// the given dependency as a child.
+func (ini *Init) Branch(initFn func() (interface{}, error)) *Init {
+	if ini == nil {
+		ini = New()
+	}
+	return ini.add(true, initFn)
+}
+
+// BranchdWithTimeout is the same as Branch, but with a timeout.
+func (ini *Init) BranchdWithTimeout(timeout time.Duration, f func(ctx context.Context) (interface{}, error)) *Init {
+	return ini.Branch(func() (interface{}, error) {
+		return ini.withTimeout(timeout, f)
+	})
+}
+
+// Do initializes the entire dependency graph.
+func (ini *Init) Do() (interface{}, error) {
+	if ini == nil {
+		panic("init is nil")
+	}
+
+	ini.init.Do(func() {
+		var (
+			dependencies []*Init
+			children     []*Init
+		)
+
+		prev := ini.prev
+		for prev != nil {
+			if prev.shouldInitialize() {
+				dependencies = append(dependencies, prev)
+			}
+			prev = prev.prev
+		}
+
+		for _, child := range ini.children {
+			if child.shouldInitialize() {
+				children = append(children, child)
+			}
+		}
+
+		for _, dep := range dependencies {
+			_, err := dep.Do()
+			if err != nil {
+				ini.err = err
+				return
+			}
+		}
+
+		if ini.f != nil {
+			ini.out, ini.err = ini.f()
+		}
+
+		for _, dep := range children {
+			_, err := dep.Do()
+			if err != nil {
+				ini.err = err
+				return
+			}
+		}
+
+	})
+
+	var counter time.Duration
+	for !ini.init.Done() {
+		counter += 10
+		if counter > 600000000 {
+			panic("BUG: timed out in lazy init")
+		}
+		time.Sleep(counter * time.Microsecond)
+	}
+
+	return ini.out, ini.err
+}
+
+func (ini *Init) shouldInitialize() bool {
+	return !(ini == nil || ini.init.Done() || ini.init.InProgress())
+}
+
+// Reset resets the current and all its dependencies.
+func (ini *Init) Reset() {
+	mu := ini.init.ResetWithLock()
+	defer mu.Unlock()
+	for _, d := range ini.children {
+		d.Reset()
+	}
+}
+
+func (ini *Init) add(branch bool, initFn func() (interface{}, error)) *Init {
+	ini.mu.Lock()
+	defer ini.mu.Unlock()
+
+	if !branch {
+		ini.checkDone()
+	}
+
+	init := &Init{
+		f:    initFn,
+		prev: ini,
+	}
+
+	if !branch {
+		ini.children = append(ini.children, init)
+	}
+
+	return init
+}
+
+func (ini *Init) checkDone() {
+	if ini.init.Done() {
+		panic("init cannot be added to after it has run")
+	}
+}
+
+func (ini *Init) withTimeout(timeout time.Duration, f func(ctx context.Context) (interface{}, error)) (interface{}, error) {
+	ctx, cancel := context.WithTimeout(context.Background(), timeout)
+	defer cancel()
+	c := make(chan verr, 1)
+
+	go func() {
+		v, err := f(ctx)
+		select {
+		case <-ctx.Done():
+			return
+		default:
+			c <- verr{v: v, err: err}
+		}
+	}()
+
+	select {
+	case <-ctx.Done():
+		return nil, errors.New("timed out initializing value. This is most likely a circular loop in a shortcode")
+	case ve := <-c:
+		return ve.v, ve.err
+	}
+
+}
+
+type verr struct {
+	v   interface{}
+	err error
+}
diff --git a/lazy/init_test.go b/lazy/init_test.go
new file mode 100644
index 00000000000..bcb57acb32a
--- /dev/null
+++ b/lazy/init_test.go
@@ -0,0 +1,150 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+	"context"
+	"errors"
+	"math/rand"
+	"strings"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestInit(t *testing.T) {
+	assert := require.New(t)
+
+	var result string
+
+	bigOrSmall := func() int {
+		if rand.Intn(10) < 3 {
+			return 10000 + rand.Intn(100000)
+		}
+		return 1 + rand.Intn(50)
+	}
+
+	f1 := func(name string) func() (interface{}, error) {
+		return func() (interface{}, error) {
+			result += name + "|"
+			size := bigOrSmall()
+			_ = strings.Repeat("Hugo Rocks! ", size)
+			return name, nil
+		}
+	}
+
+	f2 := func() func() (interface{}, error) {
+		return func() (interface{}, error) {
+			size := bigOrSmall()
+			_ = strings.Repeat("Hugo Rocks! ", size)
+			return size, nil
+		}
+	}
+
+	root := New()
+
+	root.Add(f1("root(1)"))
+	root.Add(f1("root(2)"))
+
+	branch1 := root.Branch(f1("branch_1"))
+	branch1.Add(f1("branch_1_1"))
+	branch1_2 := branch1.Add(f1("branch_1_2"))
+	branch1_2_1 := branch1_2.Add(f1("branch_1_2_1"))
+
+	var wg sync.WaitGroup
+
+	// Add some concurrency and randomness to verify thread safety and
+	// init order.
+	for i := 0; i < 100; i++ {
+		wg.Add(1)
+		go func(i int) {
+			defer wg.Done()
+			var err error
+			if rand.Intn(10) < 5 {
+				_, err = root.Do()
+				assert.NoError(err)
+			}
+
+			// Add a new branch on the fly.
+			if rand.Intn(10) > 5 {
+				branch := branch1_2.Branch(f2())
+				init := branch.Add(f2())
+				_, err = init.Do()
+				assert.NoError(err)
+			} else {
+				_, err = branch1_2_1.Do()
+				assert.NoError(err)
+			}
+			_, err = branch1_2.Do()
+			assert.NoError(err)
+
+		}(i)
+
+		wg.Wait()
+
+		assert.Equal("root(1)|root(2)|branch_1|branch_1_1|branch_1_2|branch_1_2_1|", result)
+
+	}
+
+}
+
+func TestInitAddWithTimeout(t *testing.T) {
+	assert := require.New(t)
+
+	init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (interface{}, error) {
+		return nil, nil
+	})
+
+	_, err := init.Do()
+
+	assert.NoError(err)
+}
+
+func TestInitAddWithTimeoutTimeout(t *testing.T) {
+	assert := require.New(t)
+
+	init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (interface{}, error) {
+		time.Sleep(500 * time.Millisecond)
+		select {
+		case <-ctx.Done():
+			return nil, nil
+		default:
+		}
+		t.Fatal("slept")
+		return nil, nil
+	})
+
+	_, err := init.Do()
+
+	assert.Error(err)
+
+	assert.Contains(err.Error(), "timed out")
+
+	time.Sleep(1 * time.Second)
+
+}
+
+func TestInitAddWithTimeoutError(t *testing.T) {
+	assert := require.New(t)
+
+	init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (interface{}, error) {
+		return nil, errors.New("failed")
+	})
+
+	_, err := init.Do()
+
+	assert.Error(err)
+}
diff --git a/lazy/once.go b/lazy/once.go
new file mode 100644
index 00000000000..c434bfa0b73
--- /dev/null
+++ b/lazy/once.go
@@ -0,0 +1,69 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+	"sync"
+	"sync/atomic"
+)
+
+// onceMore is similar to sync.Once.
+//
+// Additional features are:
+// * it can be reset, so the action can be repeated if needed
+// * it has methods to check if it's done or in progress
+//
+type onceMore struct {
+	mu   sync.Mutex
+	lock uint32
+	done uint32
+}
+
+func (t *onceMore) Do(f func()) {
+	if atomic.LoadUint32(&t.done) == 1 {
+		return
+	}
+
+	// f may call this Do and we would get a deadlock.
+	locked := atomic.CompareAndSwapUint32(&t.lock, 0, 1)
+	if !locked {
+		return
+	}
+	defer atomic.StoreUint32(&t.lock, 0)
+
+	t.mu.Lock()
+	defer t.mu.Unlock()
+
+	// Double check
+	if t.done == 1 {
+		return
+	}
+	defer atomic.StoreUint32(&t.done, 1)
+	f()
+
+}
+
+func (t *onceMore) InProgress() bool {
+	return atomic.LoadUint32(&t.lock) == 1
+}
+
+func (t *onceMore) Done() bool {
+	return atomic.LoadUint32(&t.done) == 1
+}
+
+func (t *onceMore) ResetWithLock() *sync.Mutex {
+	t.mu.Lock()
+	defer atomic.StoreUint32(&t.done, 0)
+	return &t.mu
+}
diff --git a/magefile.go b/magefile.go
index 19485b2bebb..04f0499a24f 100644
--- a/magefile.go
+++ b/magefile.go
@@ -15,6 +15,9 @@ import (
 	"sync"
 	"time"
 
+	"github.com/gohugoio/hugo/codegen"
+	"github.com/gohugoio/hugo/resources/page/page_generate"
+
 	"github.com/magefile/mage/mg"
 	"github.com/magefile/mage/sh"
 )
@@ -64,7 +67,37 @@ func flagEnv() map[string]string {
 }
 
 func Generate() error {
-	return sh.RunWith(flagEnv(), goexe, "generate", path.Join(packageName, "tpl/tplimpl/embedded/generate"))
+	generatorPackages := []string{
+		"tpl/tplimpl/embedded/generate",
+		//"resources/page/generate",
+	}
+
+	for _, pkg := range generatorPackages {
+		if err := sh.RunWith(flagEnv(), goexe, "generate", path.Join(packageName, pkg)); err != nil {
+			return err
+		}
+	}
+
+	dir, _ := os.Getwd()
+	c := codegen.NewInspector(dir)
+
+	if err := page_generate.Generate(c); err != nil {
+		return err
+	}
+
+	goFmtPatterns := []string{
+		// TODO(bep) check: stat ./resources/page/*autogen*: no such file or directory
+		"./resources/page/page_marshaljson.autogen.go",
+		"./resources/page/page_wrappers.autogen.go",
+	}
+
+	for _, pattern := range goFmtPatterns {
+		if err := sh.Run("gofmt", "-w", filepath.FromSlash(pattern)); err != nil {
+			return err
+		}
+	}
+
+	return nil
 }
 
 // Build hugo without git info
diff --git a/media/mediaType.go b/media/mediaType.go
index 01a6b9582c4..434672c4325 100644
--- a/media/mediaType.go
+++ b/media/mediaType.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -45,6 +45,7 @@ type Type struct {
 
 	Delimiter string `json:"delimiter"` // e.g. "."
 
+	// TODO(bep) make this a string to make it hashable + method
 	Suffixes []string `json:"suffixes"`
 
 	// Set when doing lookup by suffix.
@@ -138,6 +139,10 @@ var (
 	TOMLType       = Type{MainType: "application", SubType: "toml", Suffixes: []string{"toml"}, Delimiter: defaultDelimiter}
 	YAMLType       = Type{MainType: "application", SubType: "yaml", Suffixes: []string{"yaml", "yml"}, Delimiter: defaultDelimiter}
 
+	// Common image types
+	PNGType = Type{MainType: "image", SubType: "png", Suffixes: []string{"png"}, Delimiter: defaultDelimiter}
+	JPGType = Type{MainType: "image", SubType: "jpg", Suffixes: []string{"jpg", "jpeg"}, Delimiter: defaultDelimiter}
+
 	OctetType = Type{MainType: "application", SubType: "octet-stream"}
 )
 
@@ -158,6 +163,8 @@ var DefaultTypes = Types{
 	OctetType,
 	YAMLType,
 	TOMLType,
+	PNGType,
+	JPGType,
 }
 
 func init() {
diff --git a/media/mediaType_test.go b/media/mediaType_test.go
index ea6499a1491..e51f29b1289 100644
--- a/media/mediaType_test.go
+++ b/media/mediaType_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -52,7 +52,7 @@ func TestDefaultTypes(t *testing.T) {
 
 	}
 
-	require.Equal(t, 15, len(DefaultTypes))
+	require.Equal(t, 17, len(DefaultTypes))
 
 }
 
diff --git a/hugolib/menu.go b/navigation/menu.go
similarity index 89%
rename from hugolib/menu.go
rename to navigation/menu.go
index 81c13640573..66721ea8a60 100644
--- a/hugolib/menu.go
+++ b/navigation/menu.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package navigation
 
 import (
 	"html/template"
@@ -25,7 +25,7 @@ import (
 // or in the site config.
 type MenuEntry struct {
 	URL        string
-	Page       *Page
+	Page       Page
 	Name       string
 	Menu       string
 	Identifier string
@@ -37,11 +37,21 @@ type MenuEntry struct {
 	Children   Menu
 }
 
+// Page is a narrow version of page.Page.
+type Page interface {
+	LinkTitle() string
+	RelPermalink() string
+	Section() string
+	Weight() int
+	IsPage() bool
+	Params() map[string]interface{}
+}
+
 // Menu is a collection of menu entries.
 type Menu []*MenuEntry
 
 // Menus is a dictionary of menus.
-type Menus map[string]*Menu
+type Menus map[string]Menu
 
 // PageMenus is a dictionary of menus defined in the Pages.
 type PageMenus map[string]*MenuEntry
@@ -80,7 +90,7 @@ func (m *MenuEntry) IsSameResource(inme *MenuEntry) bool {
 	return m.URL != "" && inme.URL != "" && m.URL == inme.URL
 }
 
-func (m *MenuEntry) marshallMap(ime map[string]interface{}) {
+func (m *MenuEntry) MarshallMap(ime map[string]interface{}) {
 	for k, v := range ime {
 		loki := strings.ToLower(k)
 		switch loki {
@@ -104,24 +114,9 @@ func (m *MenuEntry) marshallMap(ime map[string]interface{}) {
 	}
 }
 
-func (m Menu) add(me *MenuEntry) Menu {
-	app := func(slice Menu, x ...*MenuEntry) Menu {
-		n := len(slice) + len(x)
-		if n > cap(slice) {
-			size := cap(slice) * 2
-			if size < n {
-				size = n
-			}
-			new := make(Menu, size)
-			copy(new, slice)
-			slice = new
-		}
-		slice = slice[0:n]
-		copy(slice[n-len(x):], x)
-		return slice
-	}
-
-	m = app(m, me)
+func (m Menu) Add(me *MenuEntry) Menu {
+	m = append(m, me)
+	// TODO(bep)
 	m.Sort()
 	return m
 }
diff --git a/navigation/pagemenus.go b/navigation/pagemenus.go
new file mode 100644
index 00000000000..86a4aeaec1d
--- /dev/null
+++ b/navigation/pagemenus.go
@@ -0,0 +1,240 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package navigation
+
+import (
+	"github.com/pkg/errors"
+	"github.com/spf13/cast"
+)
+
+type PageMenusProvider interface {
+	PageMenusGetter
+	MenyQueryProvider
+}
+
+type PageMenusGetter interface {
+	Menus() PageMenus
+}
+
+type MenusGetter interface {
+	Menus() Menus
+}
+
+type MenyQueryProvider interface {
+	HasMenuCurrent(menuID string, me *MenuEntry) bool
+	IsMenuCurrent(menuID string, inme *MenuEntry) bool
+}
+
+func PageMenusFromPage(p Page) (PageMenus, error) {
+	params := p.Params()
+
+	ms, ok := params["menus"]
+	if !ok {
+		ms, ok = params["menu"]
+	}
+
+	pm := PageMenus{}
+
+	if !ok {
+		return nil, nil
+	}
+
+	link := p.RelPermalink()
+
+	me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight(), URL: link}
+
+	// Could be the name of the menu to attach it to
+	mname, err := cast.ToStringE(ms)
+
+	if err == nil {
+		me.Menu = mname
+		pm[mname] = &me
+		return nil, nil
+	}
+
+	// Could be a slice of strings
+	mnames, err := cast.ToStringSliceE(ms)
+
+	if err == nil {
+		for _, mname := range mnames {
+			me.Menu = mname
+			pm[mname] = &me
+		}
+		return nil, nil
+	}
+
+	// Could be a structured menu entry
+	menus, err := cast.ToStringMapE(ms)
+	if err != nil {
+		return pm, errors.Wrapf(err, "unable to process menus for %q", p.LinkTitle())
+	}
+
+	for name, menu := range menus {
+		menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), URL: link, Weight: p.Weight(), Menu: name}
+		if menu != nil {
+			ime, err := cast.ToStringMapE(menu)
+			if err != nil {
+				return pm, errors.Wrapf(err, "unable to process menus for %q", p.LinkTitle())
+			}
+
+			menuEntry.MarshallMap(ime)
+		}
+		pm[name] = &menuEntry
+	}
+
+	return pm, nil
+
+}
+
+func NewMenuQueryProvider(
+	setionPagesMenu string,
+	pagem PageMenusGetter,
+	sitem MenusGetter,
+	p Page) MenyQueryProvider {
+
+	return &pageMenus{
+		p:               p,
+		pagem:           pagem,
+		sitem:           sitem,
+		setionPagesMenu: setionPagesMenu,
+	}
+}
+
+type pageMenus struct {
+	pagem           PageMenusGetter
+	sitem           MenusGetter
+	setionPagesMenu string
+	p               Page
+}
+
+func (pm *pageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool {
+
+	// page is labeled as "shadow-member" of the menu with the same identifier as the section
+	if pm.setionPagesMenu != "" {
+		section := pm.p.Section()
+
+		if section != "" && pm.setionPagesMenu == menuID && section == me.Identifier {
+			return true
+		}
+	}
+
+	if !me.HasChildren() {
+		return false
+	}
+
+	menus := pm.pagem.Menus()
+
+	if m, ok := menus[menuID]; ok {
+
+		for _, child := range me.Children {
+			if child.IsEqual(m) {
+				return true
+			}
+			if pm.HasMenuCurrent(menuID, child) {
+				return true
+			}
+		}
+	}
+
+	if pm.p == nil || pm.p.IsPage() {
+		return false
+	}
+
+	// The following logic is kept from back when Hugo had both Page and Node types.
+	// TODO(bep) consolidate / clean
+	nme := MenuEntry{Page: pm.p, Name: pm.p.LinkTitle(), URL: pm.p.RelPermalink()}
+
+	for _, child := range me.Children {
+		if nme.IsSameResource(child) {
+			return true
+		}
+		if pm.HasMenuCurrent(menuID, child) {
+			return true
+		}
+	}
+
+	return false
+
+}
+
+func (pm *pageMenus) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+	menus := pm.pagem.Menus()
+
+	if me, ok := menus[menuID]; ok {
+		if me.IsEqual(inme) {
+			return true
+		}
+	}
+
+	if pm.p == nil || pm.p.IsPage() {
+		return false
+	}
+
+	// The following logic is kept from back when Hugo had both Page and Node types.
+	// TODO(bep) consolidate / clean
+	me := MenuEntry{Page: pm.p, Name: pm.p.LinkTitle(), URL: pm.p.RelPermalink()}
+
+	if !me.IsSameResource(inme) {
+		return false
+	}
+
+	// this resource may be included in several menus
+	// search for it to make sure that it is in the menu with the given menuId
+	if menu, ok := pm.sitem.Menus()[menuID]; ok {
+		for _, menuEntry := range menu {
+			if menuEntry.IsSameResource(inme) {
+				return true
+			}
+
+			descendantFound := pm.isSameAsDescendantMenu(inme, menuEntry)
+			if descendantFound {
+				return descendantFound
+			}
+
+		}
+	}
+
+	return false
+}
+
+func (pm *pageMenus) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
+	if parent.HasChildren() {
+		for _, child := range parent.Children {
+			if child.IsSameResource(inme) {
+				return true
+			}
+			descendantFound := pm.isSameAsDescendantMenu(inme, child)
+			if descendantFound {
+				return descendantFound
+			}
+		}
+	}
+	return false
+}
+
+var NopPageMenus = new(nopPageMenus)
+
+type nopPageMenus int
+
+func (m nopPageMenus) Menus() PageMenus {
+	return PageMenus{}
+}
+
+func (m nopPageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool {
+	return false
+}
+
+func (m nopPageMenus) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+	return false
+}
diff --git a/output/outputFormat.go b/output/outputFormat.go
index 9b1f838546a..5a794e340c3 100644
--- a/output/outputFormat.go
+++ b/output/outputFormat.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -69,17 +69,27 @@ type Format struct {
 	// Note that we use the term "alternative" and not "alternate" here, as it
 	// does not necessarily replace the other format, it is an alternative representation.
 	NotAlternative bool `json:"notAlternative"`
+
+	// Setting this will make this output format control the value of
+	// .Permalink and .RelPermalink for a rendered Page.
+	// If not set, these values will point to the main (first) output format
+	// configured. That is probably the behaviour you want in most situations,
+	// as you probably don't want to link back to the RSS version of a page, as an
+	// example. AMP would, however, be a good example of an output format where this
+	// behaviour is wanted.
+	Permalinkable bool
 }
 
 // An ordered list of built-in output formats.
 var (
 	AMPFormat = Format{
-		Name:      "AMP",
-		MediaType: media.HTMLType,
-		BaseName:  "index",
-		Path:      "amp",
-		Rel:       "amphtml",
-		IsHTML:    true,
+		Name:          "AMP",
+		MediaType:     media.HTMLType,
+		BaseName:      "index",
+		Path:          "amp",
+		Rel:           "amphtml",
+		IsHTML:        true,
+		Permalinkable: true,
 		// See https://www.ampproject.org/learn/overview/
 	}
 
@@ -109,11 +119,12 @@ var (
 	}
 
 	HTMLFormat = Format{
-		Name:      "HTML",
-		MediaType: media.HTMLType,
-		BaseName:  "index",
-		Rel:       "canonical",
-		IsHTML:    true,
+		Name:          "HTML",
+		MediaType:     media.HTMLType,
+		BaseName:      "index",
+		Rel:           "canonical",
+		IsHTML:        true,
+		Permalinkable: true,
 	}
 
 	JSONFormat = Format{
diff --git a/output/outputFormat_test.go b/output/outputFormat_test.go
index 410fd74ba0e..6bd4dda5b2c 100644
--- a/output/outputFormat_test.go
+++ b/output/outputFormat_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -42,6 +42,7 @@ func TestDefaultTypes(t *testing.T) {
 	require.Empty(t, CSVFormat.Protocol)
 	require.True(t, CSVFormat.IsPlainText)
 	require.False(t, CSVFormat.IsHTML)
+	require.False(t, CSVFormat.Permalinkable)
 
 	require.Equal(t, "HTML", HTMLFormat.Name)
 	require.Equal(t, media.HTMLType, HTMLFormat.MediaType)
@@ -49,6 +50,7 @@ func TestDefaultTypes(t *testing.T) {
 	require.Empty(t, HTMLFormat.Protocol)
 	require.False(t, HTMLFormat.IsPlainText)
 	require.True(t, HTMLFormat.IsHTML)
+	require.True(t, HTMLFormat.Permalinkable)
 
 	require.Equal(t, "AMP", AMPFormat.Name)
 	require.Equal(t, media.HTMLType, AMPFormat.MediaType)
@@ -56,6 +58,7 @@ func TestDefaultTypes(t *testing.T) {
 	require.Empty(t, AMPFormat.Protocol)
 	require.False(t, AMPFormat.IsPlainText)
 	require.True(t, AMPFormat.IsHTML)
+	require.True(t, AMPFormat.Permalinkable)
 
 	require.Equal(t, "RSS", RSSFormat.Name)
 	require.Equal(t, media.RSSType, RSSFormat.MediaType)
diff --git a/parser/pageparser/itemtype_string.go b/parser/pageparser/itemtype_string.go
new file mode 100644
index 00000000000..632afaecc00
--- /dev/null
+++ b/parser/pageparser/itemtype_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type ItemType"; DO NOT EDIT.
+
+package pageparser
+
+import "strconv"
+
+const _ItemType_name = "tErrortEOFTypeHTMLStartTypeLeadSummaryDividerTypeFrontMatterYAMLTypeFrontMatterTOMLTypeFrontMatterJSONTypeFrontMatterORGTypeEmojiTypeIgnoretLeftDelimScNoMarkuptRightDelimScNoMarkuptLeftDelimScWithMarkuptRightDelimScWithMarkuptScClosetScNametScNameInlinetScParamtScParamValtTexttKeywordMarker"
+
+var _ItemType_index = [...]uint16{0, 6, 10, 23, 45, 64, 83, 102, 120, 129, 139, 159, 180, 202, 225, 233, 240, 253, 261, 272, 277, 291}
+
+func (i ItemType) String() string {
+	if i < 0 || i >= ItemType(len(_ItemType_index)-1) {
+		return "ItemType(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _ItemType_name[_ItemType_index[i]:_ItemType_index[i+1]]
+}
diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go
index 14b341ee9d8..db563d44c7f 100644
--- a/parser/pageparser/pageparser.go
+++ b/parser/pageparser/pageparser.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -36,16 +36,28 @@ type Result interface {
 var _ Result = (*pageLexer)(nil)
 
 // Parse parses the page in the given reader according to the given Config.
+// TODO(bep) now that we have improved the "lazy order" init, it *may* be
+// some potential saving in doing a buffered approach where the first pass does
+// the frontmatter only.
 func Parse(r io.Reader, cfg Config) (Result, error) {
+	return parseSection(r, cfg, lexIntroSection)
+}
+
+// ParseMain parses starting with the main section. Used in tests.
+func ParseMain(r io.Reader, cfg Config) (Result, error) {
+	return parseSection(r, cfg, lexMainSection)
+}
+
+func parseSection(r io.Reader, cfg Config, start stateFunc) (Result, error) {
 	b, err := ioutil.ReadAll(r)
 	if err != nil {
 		return nil, errors.Wrap(err, "failed to read page content")
 	}
-	return parseBytes(b, cfg)
+	return parseBytes(b, cfg, start)
 }
 
-func parseBytes(b []byte, cfg Config) (Result, error) {
-	lexer := newPageLexer(b, lexIntroSection, cfg)
+func parseBytes(b []byte, cfg Config, start stateFunc) (Result, error) {
+	lexer := newPageLexer(b, start, cfg)
 	lexer.run()
 	return lexer, nil
 }
@@ -60,7 +72,7 @@ type Iterator struct {
 // consumes and returns the next item
 func (t *Iterator) Next() Item {
 	t.lastPos++
-	return t.current()
+	return t.Current()
 }
 
 // Input returns the input source.
@@ -70,7 +82,8 @@ func (t *Iterator) Input() []byte {
 
 var errIndexOutOfBounds = Item{tError, 0, []byte("no more tokens")}
 
-func (t *Iterator) current() Item {
+// Current will repeatedly return the current item.
+func (t *Iterator) Current() Item {
 	if t.lastPos >= len(t.l.items) {
 		return errIndexOutOfBounds
 	}
@@ -122,5 +135,5 @@ func (t *Iterator) Consume(cnt int) {
 
 // LineNumber returns the current line number. Used for logging.
 func (t *Iterator) LineNumber() int {
-	return bytes.Count(t.l.input[:t.current().Pos], lf) + 1
+	return bytes.Count(t.l.input[:t.Current().Pos], lf) + 1
 }
diff --git a/parser/pageparser/pageparser_test.go b/parser/pageparser/pageparser_test.go
index fad7082d272..f54376c339d 100644
--- a/parser/pageparser/pageparser_test.go
+++ b/parser/pageparser/pageparser_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -38,7 +38,7 @@ This is some summary. This is some summary. This is some summary. This is some s
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		if _, err := parseBytes(input, cfg); err != nil {
+		if _, err := parseBytes(input, cfg, lexIntroSection); err != nil {
 			b.Fatal(err)
 		}
 	}
@@ -64,7 +64,7 @@ This is some summary. This is some summary. This is some summary. This is some s
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		if _, err := parseBytes(input, cfg); err != nil {
+		if _, err := parseBytes(input, cfg, lexIntroSection); err != nil {
 			b.Fatal(err)
 		}
 	}
diff --git a/publisher/publisher.go b/publisher/publisher.go
index 0da70546135..119be356ba1 100644
--- a/publisher/publisher.go
+++ b/publisher/publisher.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -86,7 +86,7 @@ func NewDestinationPublisher(fs afero.Fs, outputFormats output.Formats, mediaTyp
 // to its destination, e.g. /public.
 func (p DestinationPublisher) Publish(d Descriptor) error {
 	if d.TargetPath == "" {
-		return errors.New("must provide a TargetPath")
+		return errors.New("Publish: must provide a TargetPath")
 	}
 
 	src := d.Src
diff --git a/related/inverted_index.go b/related/inverted_index.go
index 309eb4097d0..fda6b922263 100644
--- a/related/inverted_index.go
+++ b/related/inverted_index.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -106,11 +106,15 @@ type IndexConfig struct {
 
 // Document is the interface an indexable document in Hugo must fulfill.
 type Document interface {
-	// SearchKeywords returns a list of keywords for the given index config.
-	SearchKeywords(cfg IndexConfig) ([]Keyword, error)
+	// RelatedKeywords returns a list of keywords for the given index config.
+	RelatedKeywords(cfg IndexConfig) ([]Keyword, error)
 
 	// When this document was or will be published.
-	PubDate() time.Time
+	PublishDate() time.Time
+
+	// Name is used as a tiebreaker if both Weight and PublishDate are
+	// the same.
+	Name() string
 }
 
 // InvertedIndex holds an inverted index, also sometimes named posting list, which
@@ -164,7 +168,7 @@ func (idx *InvertedIndex) Add(docs ...Document) error {
 
 		for _, doc := range docs {
 			var words []Keyword
-			words, err = doc.SearchKeywords(config)
+			words, err = doc.RelatedKeywords(config)
 			if err != nil {
 				continue
 			}
@@ -211,7 +215,10 @@ func (r ranks) Len() int      { return len(r) }
 func (r ranks) Swap(i, j int) { r[i], r[j] = r[j], r[i] }
 func (r ranks) Less(i, j int) bool {
 	if r[i].Weight == r[j].Weight {
-		return r[i].Doc.PubDate().After(r[j].Doc.PubDate())
+		if r[i].Doc.PublishDate() == r[j].Doc.PublishDate() {
+			return r[i].Doc.Name() < r[j].Doc.Name()
+		}
+		return r[i].Doc.PublishDate().After(r[j].Doc.PublishDate())
 	}
 	return r[i].Weight > r[j].Weight
 }
@@ -241,7 +248,7 @@ func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document
 	}
 
 	for _, cfg := range configs {
-		keywords, err := doc.SearchKeywords(cfg)
+		keywords, err := doc.RelatedKeywords(cfg)
 		if err != nil {
 			return nil, err
 		}
@@ -250,7 +257,7 @@ func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document
 
 	}
 
-	return idx.searchDate(doc.PubDate(), q...)
+	return idx.searchDate(doc.PublishDate(), q...)
 }
 
 // ToKeywords returns a Keyword slice of the given input.
@@ -344,7 +351,7 @@ func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement)
 				for _, doc := range docs {
 					if applyDateFilter {
 						// Exclude newer than the limit given
-						if doc.PubDate().After(upperDate) {
+						if doc.PublishDate().After(upperDate) {
 							continue
 						}
 					}
diff --git a/related/inverted_index_test.go b/related/inverted_index_test.go
index 2e6b90bbf0b..4ef27875d41 100644
--- a/related/inverted_index_test.go
+++ b/related/inverted_index_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@ import (
 type testDoc struct {
 	keywords map[string][]Keyword
 	date     time.Time
+	name     string
 }
 
 func (d *testDoc) String() string {
@@ -39,11 +40,19 @@ func (d *testDoc) String() string {
 	return s
 }
 
+func (d *testDoc) Name() string {
+	return d.name
+}
+
 func newTestDoc(name string, keywords ...string) *testDoc {
+	time.Sleep(1 * time.Millisecond)
+	return newTestDocWithDate(name, time.Now(), keywords...)
+}
+
+func newTestDocWithDate(name string, date time.Time, keywords ...string) *testDoc {
 	km := make(map[string][]Keyword)
 
-	time.Sleep(1 * time.Millisecond)
-	kw := &testDoc{keywords: km, date: time.Now()}
+	kw := &testDoc{keywords: km, date: date}
 
 	kw.addKeywords(name, keywords...)
 	return kw
@@ -68,11 +77,11 @@ func createTestKeywords(name string, keywords ...string) map[string][]string {
 	}
 }
 
-func (d *testDoc) SearchKeywords(cfg IndexConfig) ([]Keyword, error) {
+func (d *testDoc) RelatedKeywords(cfg IndexConfig) ([]Keyword, error) {
 	return d.keywords[cfg.Name], nil
 }
 
-func (d *testDoc) PubDate() time.Time {
+func (d *testDoc) PublishDate() time.Time {
 	return d.date
 }
 
@@ -167,6 +176,29 @@ func TestSearch(t *testing.T) {
 		assert.Equal(docs[3], m[0])
 	})
 
+	t.Run("searchdoc-keywords-same-date", func(t *testing.T) {
+		assert := require.New(t)
+		idx := NewInvertedIndex(config)
+
+		date := time.Now()
+
+		doc := newTestDocWithDate("keywords", date, "a", "b")
+		doc.name = "thedoc"
+
+		for i := 0; i < 10; i++ {
+			docc := *doc
+			docc.name = fmt.Sprintf("doc%d", i)
+			idx.Add(&docc)
+		}
+
+		m, err := idx.SearchDoc(doc, "keywords")
+		assert.NoError(err)
+		assert.Len(m, 10)
+		for i := 0; i < 10; i++ {
+			assert.Equal(fmt.Sprintf("doc%d", i), m[i].Name())
+		}
+	})
+
 }
 
 func BenchmarkRelatedNewIndex(b *testing.B) {
diff --git a/resources/image.go b/resources/image.go
index d46facac5ab..89186f9e286 100644
--- a/resources/image.go
+++ b/resources/image.go
@@ -21,7 +21,6 @@ import (
 	"image/draw"
 	"image/jpeg"
 	"io"
-	"io/ioutil"
 	"os"
 	"strconv"
 	"strings"
@@ -126,8 +125,6 @@ type Image struct {
 	configInit   sync.Once
 	configLoaded bool
 
-	copyToDestinationInit sync.Once
-
 	imaging *Imaging
 
 	format imaging.Format
@@ -462,6 +459,7 @@ func (i *Image) decodeSource() (image.Image, error) {
 	return img, err
 }
 
+// returns an opened file or nil if nothing to write.
 func (i *Image) openDestinationsForWriting() (io.WriteCloser, error) {
 	targetFilenames := i.targetFilenames()
 	var changedFilenames []string
@@ -470,7 +468,9 @@ func (i *Image) openDestinationsForWriting() (io.WriteCloser, error) {
 	// This is a processed version of the original.
 	// If it exists on destination with the same filename and file size, it is
 	// the same file, so no need to transfer it again.
+	// TODO(bep) page fix this logic somehow
 	for _, targetFilename := range targetFilenames {
+
 		if fi, err := i.spec.BaseFs.PublishFs.Stat(targetFilename); err == nil && fi.Size() == i.osFileInfo.Size() {
 			continue
 		}
@@ -478,14 +478,7 @@ func (i *Image) openDestinationsForWriting() (io.WriteCloser, error) {
 	}
 
 	if len(changedFilenames) == 0 {
-		return struct {
-			io.Writer
-			io.Closer
-		}{
-			ioutil.Discard,
-			ioutil.NopCloser(nil),
-		}, nil
-
+		return nil, nil
 	}
 
 	return helpers.OpenFilesForWriting(i.spec.BaseFs.PublishFs, changedFilenames...)
diff --git a/resources/image_cache.go b/resources/image_cache.go
index 58be839b33c..cf1e999badc 100644
--- a/resources/image_cache.go
+++ b/resources/image_cache.go
@@ -14,13 +14,11 @@
 package resources
 
 import (
-	"fmt"
 	"image"
 	"io"
 	"path/filepath"
 	"strings"
 	"sync"
-	"time"
 
 	"github.com/gohugoio/hugo/common/hugio"
 
@@ -99,6 +97,11 @@ func (c *imageCache) getOrCreate(
 			return err
 		}
 
+		if w == nil {
+			// Nothing to write.
+			return nil
+		}
+
 		defer w.Close()
 		_, err = io.Copy(w, r)
 		return err
@@ -121,10 +124,12 @@ func (c *imageCache) getOrCreate(
 			return err
 		}
 
-		mw := hugio.NewMultiWriteCloser(w, destinations)
-		defer mw.Close()
+		if destinations != nil {
+			w = hugio.NewMultiWriteCloser(w, destinations)
+		}
+		defer w.Close()
 
-		return img.encodeTo(conf, conv, mw)
+		return img.encodeTo(conf, conv, w)
 	}
 
 	// Now look in the file cache.
@@ -157,8 +162,3 @@ func (c *imageCache) getOrCreate(
 func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *imageCache {
 	return &imageCache{fileCache: fileCache, pathSpec: ps, store: make(map[string]*Image)}
 }
-
-func timeTrack(start time.Time, name string) {
-	elapsed := time.Since(start)
-	fmt.Printf("%s took %s\n", name, elapsed)
-}
diff --git a/resources/page/page.go b/resources/page/page.go
new file mode 100644
index 00000000000..efbefb45610
--- /dev/null
+++ b/resources/page/page.go
@@ -0,0 +1,365 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+	"html/template"
+
+	"github.com/bep/gitmap"
+	"github.com/gohugoio/hugo/config"
+
+	"github.com/gohugoio/hugo/common/hugo"
+	"github.com/gohugoio/hugo/common/maps"
+
+	"github.com/gohugoio/hugo/compare"
+
+	"github.com/gohugoio/hugo/navigation"
+	"github.com/gohugoio/hugo/related"
+	"github.com/gohugoio/hugo/resources/resource"
+	"github.com/gohugoio/hugo/source"
+)
+
+// Clear clears any global package state.
+func Clear() error {
+	spc.clear()
+	return nil
+}
+
+// AlternativeOutputFormatsProvider provides alternative output formats for a
+// Page.
+type AlternativeOutputFormatsProvider interface {
+	// AlternativeOutputFormats gives the alternative output formats for the
+	// current output.
+	// Note that we use the term "alternative" and not "alternate" here, as it
+	// does not necessarily replace the other format, it is an alternative representation.
+	AlternativeOutputFormats() OutputFormats
+}
+
+// AuthorProvider provides author information.
+type AuthorProvider interface {
+	Author() Author
+	Authors() AuthorList
+}
+
+// ChildCareProvider provides accessors to child resources.
+type ChildCareProvider interface {
+	Pages() Pages
+	Resources() resource.Resources
+}
+
+// ContentProvider provides the content related values for a Page.
+type ContentProvider interface {
+	Content() (interface{}, error)
+	Plain() string
+	PlainWords() []string
+	Summary() template.HTML
+	Truncated() bool
+	FuzzyWordCount() int
+	WordCount() int
+	ReadingTime() int
+	Len() int
+}
+
+// FileProvider provides the source file.
+type FileProvider interface {
+	File() source.File
+}
+
+// GetPageProvider provides the GetPage method.
+type GetPageProvider interface {
+	// GetPage looks up a page for the given ref.
+	//    {{ with .GetPage "blog" }}{{ .Title }}{{ end }}
+	//
+	// This will return nil when no page could be found, and will return
+	// an error if the ref is ambiguous.
+	GetPage(ref string) (Page, error)
+}
+
+// GitInfoProvider provides Git info.
+type GitInfoProvider interface {
+	GitInfo() *gitmap.GitInfo
+}
+
+// InSectionPositioner provides section navigation.
+type InSectionPositioner interface {
+	NextInSection() Page
+	PrevInSection() Page
+}
+
+// InternalDependencies is considered an internal interface.
+type InternalDependencies interface {
+	GetRelatedDocsHandler() *RelatedDocsHandler
+}
+
+// OutputFormatsProvider provides the OutputFormats of a Page.
+type OutputFormatsProvider interface {
+	OutputFormats() OutputFormats
+}
+
+// Page is the core interface in Hugo.
+type Page interface {
+	ContentProvider
+	TableOfContentsProvider
+	PageWithoutContent
+}
+
+// PageMetaProvider provides page metadata, typically provided via front matter.
+type PageMetaProvider interface {
+	// The 4 page dates
+	resource.Dated
+
+	// Aliases forms the base for redirects generation.
+	Aliases() []string
+
+	// BundleType returns the bundle type: "leaf", "branch" or an empty string if it is none.
+	// See https://gohugo.io/content-management/page-bundles/
+	BundleType() string
+
+	// A configured description.
+	Description() string
+
+	// Whether this is a draft. Will only be true if run with the --buildDrafts (-D) flag.
+	Draft() bool
+
+	// IsHome returns whether this is the home page.
+	IsHome() bool
+
+	// Configured keywords.
+	Keywords() []string
+
+	// The Page Kind. One of page, home, section, taxonomy, taxonomyTerm.
+	Kind() string
+
+	// The configured layout to use to render this page. Typically set in front matter.
+	Layout() string
+
+	// The title used for links.
+	LinkTitle() string
+
+	// IsNode returns whether this is an item of one of the list types in Hugo,
+	// i.e. not a regular content
+	IsNode() bool
+
+	// IsPage returns whether this is a regular content page.
+	IsPage() bool
+
+	// Param looks for a param in Page and then in Site config.
+	Param(key interface{}) (interface{}, error)
+
+	// Path gets the relative path, including file name and extension if relevant,
+	// to the source of this Page. It will be relative to any content root.
+	Path() string
+
+	// The slug, typically defined in front matter.
+	Slug() string
+
+	// This page's language code. Will be the same as the site's.
+	Lang() string
+
+	// IsSection returns whether this is a section
+	IsSection() bool
+
+	// Section returns the first path element below the content root.
+	Section() string
+
+	// Returns a slice of sections (directories if it's a file) to this
+	// Page.
+	SectionsEntries() []string
+
+	// SectionsPath is SectionsEntries joined with a /.
+	SectionsPath() string
+
+	// Sitemap returns the sitemap configuration for this page.
+	Sitemap() config.Sitemap
+
+	// Type is a discriminator used to select layouts etc. It is typically set
+	// in front matter, but will fall back to the root section.
+	Type() string
+
+	// The configured weight, used as the first sort value in the default
+	// page sort if non-zero.
+	Weight() int
+}
+
+// PageRenderProvider provides a way for a Page to render itself.
+type PageRenderProvider interface {
+	Render(layout ...string) template.HTML
+}
+
+// PageWithoutContent is the Page without any of the content methods.
+type PageWithoutContent interface {
+	RawContentProvider
+	resource.Resource
+	PageMetaProvider
+	resource.LanguageProvider
+
+	// For pages backed by a file.
+	FileProvider
+
+	// Output formats
+	OutputFormatsProvider
+	AlternativeOutputFormatsProvider
+
+	// Tree navigation
+	ChildCareProvider
+	TreeProvider
+
+	// Horizontal navigation
+	InSectionPositioner
+	PageRenderProvider
+	PaginatorProvider
+	Positioner
+	navigation.PageMenusProvider
+
+	// TODO(bep)
+	AuthorProvider
+
+	// Page lookups/refs
+	GetPageProvider
+	RefProvider
+
+	resource.TranslationKeyProvider
+	TranslationsProvider
+
+	SitesProvider
+
+	// Helper methods
+	ShortcodeInfoProvider
+	compare.Eqer
+	maps.Scratcher
+	RelatedKeywordsProvider
+
+	DeprecatedWarningPageMethods
+}
+
+// Positioner provides next/prev navigation.
+type Positioner interface {
+	Next() Page
+	Prev() Page
+
+	// Deprecated: Use Prev. Will be removed in Hugo 0.57
+	PrevPage() Page
+
+	// Deprecated: Use Next. Will be removed in Hugo 0.57
+	NextPage() Page
+}
+
+// RawContentProvider provides the raw, unprocessed content of the page.
+type RawContentProvider interface {
+	RawContent() string
+}
+
+// RefProvider provides the methods needed to create reflinks to pages.
+type RefProvider interface {
+	Ref(argsm map[string]interface{}) (string, error)
+	RefFrom(argsm map[string]interface{}, source interface{}) (string, error)
+	RelRef(argsm map[string]interface{}) (string, error)
+	RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error)
+}
+
+// RelatedKeywordsProvider allows a Page to be indexed.
+type RelatedKeywordsProvider interface {
+	// Make it indexable as a related.Document
+	RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error)
+}
+
+// ShortcodeInfoProvider provides info about the shortcodes in a Page.
+type ShortcodeInfoProvider interface {
+	// HasShortcode return whether the page has a shortcode with the given name.
+	// This method is mainly motivated with the Hugo Docs site's need for a list
+	// of pages with the `todo` shortcode in it.
+	HasShortcode(name string) bool
+}
+
+// SitesProvider provides accessors to get sites.
+type SitesProvider interface {
+	Site() Site
+	Sites() Sites
+}
+
+// TableOfContentsProvider provides the table of contents for a Page.
+type TableOfContentsProvider interface {
+	TableOfContents() template.HTML
+}
+
+// TranslationsProvider provides access to any translations.
+type TranslationsProvider interface {
+
+	// IsTranslated returns whether this content file is translated to
+	// other language(s).
+	IsTranslated() bool
+
+	// AllTranslations returns all translations, including the current Page.
+	AllTranslations() Pages
+
+	// Translations returns the translations excluding the current Page.
+	Translations() Pages
+}
+
+// TreeProvider provides section tree navigation.
+type TreeProvider interface {
+
+	// IsAncestor returns whether the current page is an ancestor of the given page.
+	// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+	IsAncestor(other interface{}) (bool, error)
+
+	// CurrentSection returns the page's current section or the page itself if home or a section.
+	// Note that this will return nil for pages that are not regular, home or section pages.
+	CurrentSection() Page
+
+	// IsDescendant returns whether the current page is a descendant of the given page.
+	// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+	IsDescendant(other interface{}) (bool, error)
+
+	// FirstSection returns the section on level 1 below home, e.g. "/docs".
+	// For the home page, this will return itself.
+	FirstSection() Page
+
+	// InSection returns whether the given page is in the current section.
+	// Note that this will always return false for pages that are
+	// not either regular, home or section pages.
+	InSection(other interface{}) (bool, error)
+
+	// Parent returns a section's parent section or a page's section.
+	// To get a section's subsections, see Page's Sections method.
+	Parent() Page
+
+	// Sections returns this section's subsections, if any.
+	// Note that for non-sections, this method will always return an empty list.
+	Sections() Pages
+}
+
+// DeprecatedWarningPageMethods lists deprecated Page methods that will trigger
+// a WARNING if invoked.
+// This was added in Hugo 0.55.
+type DeprecatedWarningPageMethods interface {
+	source.FileWithoutOverlap
+	DeprecatedWarningPageMethods1
+}
+
+type DeprecatedWarningPageMethods1 interface {
+	IsDraft() bool
+	Hugo() hugo.Info
+	LanguagePrefix() string
+	GetParam(key string) interface{}
+	RSSLink() template.URL
+	URL() string
+}
+
+// Move here to trigger ERROR instead of WARNING.
+// TODO(bep) create wrappers and put into the Page once it has some methods.
+type DeprecatedErrorPageMethods interface {
+}
diff --git a/hugolib/author.go b/resources/page/page_author.go
similarity index 94%
rename from hugolib/author.go
rename to resources/page/page_author.go
index 0f4327097e9..9e8a95182f9 100644
--- a/hugolib/author.go
+++ b/resources/page/page_author.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 // AuthorList is a list of all authors and their metadata.
 type AuthorList map[string]Author
diff --git a/resources/page/page_data.go b/resources/page/page_data.go
new file mode 100644
index 00000000000..3345a44dac5
--- /dev/null
+++ b/resources/page/page_data.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+	"fmt"
+)
+
+// Data represents the .Data element in a Page in Hugo. We make this
+// a type so we can do lazy loading of .Data.Pages
+type Data map[string]interface{}
+
+// Pages returns the pages stored with key "pages". If this is a func,
+// it will be invoked.
+func (d Data) Pages() Pages {
+	v, found := d["pages"]
+	if !found {
+		return nil
+	}
+
+	switch vv := v.(type) {
+	case Pages:
+		return vv
+	case func() Pages:
+		return vv()
+	default:
+		panic(fmt.Sprintf("%T is not Pages", v))
+	}
+}
diff --git a/resources/page/page_data_test.go b/resources/page/page_data_test.go
new file mode 100644
index 00000000000..b6641bcd781
--- /dev/null
+++ b/resources/page/page_data_test.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"bytes"
+	"testing"
+
+	"text/template"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestPageData(t *testing.T) {
+	assert := require.New(t)
+
+	data := make(Data)
+
+	assert.Nil(data.Pages())
+
+	pages := Pages{
+		&testPage{title: "a1"},
+		&testPage{title: "a2"},
+	}
+
+	data["pages"] = pages
+
+	assert.Equal(pages, data.Pages())
+
+	data["pages"] = func() Pages {
+		return pages
+	}
+
+	assert.Equal(pages, data.Pages())
+
+	templ, err := template.New("").Parse(`Pages: {{ .Pages }}`)
+
+	assert.NoError(err)
+
+	var buff bytes.Buffer
+
+	assert.NoError(templ.Execute(&buff, data))
+
+	assert.Contains(buff.String(), "Pages(2)")
+
+}
diff --git a/resources/page/page_generate/.gitignore b/resources/page/page_generate/.gitignore
new file mode 100644
index 00000000000..84fd70a9f76
--- /dev/null
+++ b/resources/page/page_generate/.gitignore
@@ -0,0 +1 @@
+generate
\ No newline at end of file
diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go
new file mode 100644
index 00000000000..af85cb429a5
--- /dev/null
+++ b/resources/page/page_generate/generate_page_wrappers.go
@@ -0,0 +1,212 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page_generate
+
+import (
+	"bytes"
+	"fmt"
+	"os"
+	"path/filepath"
+	"reflect"
+
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/common/maps"
+
+	"github.com/gohugoio/hugo/codegen"
+	"github.com/gohugoio/hugo/resources/page"
+	"github.com/gohugoio/hugo/source"
+)
+
+const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+`
+
+var (
+	fileInterfaceDeprecated = reflect.TypeOf((*source.FileWithoutOverlap)(nil)).Elem()
+	pageInterfaceDeprecated = reflect.TypeOf((*page.DeprecatedWarningPageMethods)(nil)).Elem()
+	pageInterface           = reflect.TypeOf((*page.Page)(nil)).Elem()
+
+	packageDir = filepath.FromSlash("resources/page")
+)
+
+func Generate(c *codegen.Inspector) error {
+	if err := generateMarshalJSON(c); err != nil {
+		return errors.Wrap(err, "failed to generate JSON marshaler")
+
+	}
+
+	if err := generateDeprecatedWrappers(c); err != nil {
+		return errors.Wrap(err, "failed to generate deprecate wrappers")
+	}
+
+	return nil
+}
+
+func generateMarshalJSON(c *codegen.Inspector) error {
+	filename := filepath.Join(c.ProjectRootDir, packageDir, "page_marshaljson.autogen.go")
+	f, err := os.Create(filename)
+
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	includes := []reflect.Type{pageInterface}
+
+	// Exclude these methods
+	excludes := []reflect.Type{
+	// We need to evaluate the deprecated vs JSON in the future,
+		// but leave them out for now.
+		pageInterfaceDeprecated,
+
+		// Leave this out for now. We need to revisit the author issue.
+		reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(),
+
+		// navigation.PageMenus
+
+		// Prevent loops.
+		reflect.TypeOf((*page.SitesProvider)(nil)).Elem(),
+		reflect.TypeOf((*page.Positioner)(nil)).Elem(),
+
+		reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(),
+		reflect.TypeOf((*page.TreeProvider)(nil)).Elem(),
+		reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(),
+		reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(),
+		reflect.TypeOf((*maps.Scratcher)(nil)).Elem(),
+	}
+
+	methods := c.MethodsFromTypes(
+		includes,
+		excludes)
+
+	if len(methods) == 0 {
+		return errors.New("no methods found")
+	}
+
+	marshalJSON, pkgImports := methods.ToMarshalJSON("Page", "github.com/gohugoio/hugo/resources/page")
+
+	fmt.Fprintf(f, `%s
+
+package page
+
+%s
+
+
+%s
+
+
+`, header, importsString(pkgImports), marshalJSON)
+
+	return nil
+}
+
+func generateDeprecatedWrappers(c *codegen.Inspector) error {
+	filename := filepath.Join(c.ProjectRootDir, packageDir, "page_wrappers.autogen.go")
+	f, err := os.Create(filename)
+	if err != nil {
+		return err
+	}
+	defer f.Close()
+
+	// Generate a wrapper for deprecated page methods
+
+	reasons := map[string]string{
+		"IsDraft":        "Use .Draft.",
+		"Hugo":           "Use the global hugo function.",
+		"LanguagePrefix": "Use .Site.LanguagePrefix.",
+		"GetParam":       "Use .Param or .Params.myParam.",
+		"RSSLink": `Use the Output Format's link, e.g. something like: 
+    {{ with .OutputFormats.Get "RSS" }}{{ . RelPermalink }}{{ end }}`,
+		"URL": "Use .Permalink or .RelPermalink. If what you want is the front matter URL value, use .Params.url",
+	}
+
+	deprecated := func(name string, tp reflect.Type) string {
+		var alternative string
+		if tp == fileInterfaceDeprecated {
+			alternative = "Use .File." + name
+		} else {
+			var found bool
+			alternative, found = reasons[name]
+			if !found {
+				panic(fmt.Sprintf("no deprecated reason found for %q", name))
+			}
+		}
+
+		return fmt.Sprintf("helpers.Deprecated(%q, %q, %q, false)", "Page", "."+name, alternative)
+	}
+
+	var buff bytes.Buffer
+
+	methods := c.MethodsFromTypes([]reflect.Type{fileInterfaceDeprecated, pageInterfaceDeprecated}, nil)
+
+	for _, m := range methods {
+		fmt.Fprint(&buff, m.Declaration("*pageDeprecated"))
+		fmt.Fprintln(&buff, " {")
+		fmt.Fprintf(&buff, "\t%s\n", deprecated(m.Name, m.Owner))
+		fmt.Fprintf(&buff, "\t%s\n}\n", m.Delegate("p", "p"))
+
+	}
+
+	pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/helpers")
+
+	fmt.Fprintf(f, `%s
+
+package page
+
+%s
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+	return &pageDeprecated{p: p}
+}
+
+type pageDeprecated struct {
+	p DeprecatedWarningPageMethods
+}
+
+%s
+
+`, header, importsString(pkgImports), buff.String())
+
+	return nil
+}
+
+func importsString(imps []string) string {
+	if len(imps) == 0 {
+		return ""
+	}
+
+	if len(imps) == 1 {
+		return fmt.Sprintf("import %q", imps[0])
+	}
+
+	impsStr := "import (\n"
+	for _, imp := range imps {
+		impsStr += fmt.Sprintf("%q\n", imp)
+	}
+
+	return impsStr + ")"
+}
diff --git a/common/hugo/site.go b/resources/page/page_kinds.go
similarity index 61%
rename from common/hugo/site.go
rename to resources/page/page_kinds.go
index 08391858a1b..a2e59438ef0 100644
--- a/common/hugo/site.go
+++ b/resources/page/page_kinds.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,14 +11,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugo
+package page
 
-import "github.com/gohugoio/hugo/langs"
+const (
+	KindPage = "page"
 
-// Site represents a site in the build. This is currently a very narrow interface,
-// but the actual implementation will be richer, see hugolib.SiteInfo.
-type Site interface {
-	Language() *langs.Language
-	IsServer() bool
-	Hugo() Info
-}
+	// The rest are node types; home page, sections etc.
+
+	KindHome         = "home"
+	KindSection      = "section"
+	KindTaxonomy     = "taxonomy"
+	KindTaxonomyTerm = "taxonomyTerm"
+)
diff --git a/resources/page/page_kinds_test.go b/resources/page/page_kinds_test.go
new file mode 100644
index 00000000000..8ad7343dcdc
--- /dev/null
+++ b/resources/page/page_kinds_test.go
@@ -0,0 +1,31 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestKind(t *testing.T) {
+	t.Parallel()
+	// Add tests for these constants to make sure they don't change
+	require.Equal(t, "page", KindPage)
+	require.Equal(t, "home", KindHome)
+	require.Equal(t, "section", KindSection)
+	require.Equal(t, "taxonomy", KindTaxonomy)
+	require.Equal(t, "taxonomyTerm", KindTaxonomyTerm)
+
+}
diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go
new file mode 100644
index 00000000000..5f4c9d32fd4
--- /dev/null
+++ b/resources/page/page_marshaljson.autogen.go
@@ -0,0 +1,198 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+	"encoding/json"
+	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/navigation"
+	"github.com/gohugoio/hugo/source"
+	"html/template"
+	"time"
+)
+
+func MarshalPageToJSON(p Page) ([]byte, error) {
+	content, err := p.Content()
+	if err != nil {
+		return nil, err
+	}
+	plain := p.Plain()
+	plainWords := p.PlainWords()
+	summary := p.Summary()
+	truncated := p.Truncated()
+	fuzzyWordCount := p.FuzzyWordCount()
+	wordCount := p.WordCount()
+	readingTime := p.ReadingTime()
+	length := p.Len()
+	tableOfContents := p.TableOfContents()
+	rawContent := p.RawContent()
+	mediaType := p.MediaType()
+	resourceType := p.ResourceType()
+	permalink := p.Permalink()
+	relPermalink := p.RelPermalink()
+	name := p.Name()
+	title := p.Title()
+	params := p.Params()
+	data := p.Data()
+	date := p.Date()
+	lastmod := p.Lastmod()
+	publishDate := p.PublishDate()
+	expiryDate := p.ExpiryDate()
+	aliases := p.Aliases()
+	bundleType := p.BundleType()
+	description := p.Description()
+	draft := p.Draft()
+	isHome := p.IsHome()
+	keywords := p.Keywords()
+	kind := p.Kind()
+	layout := p.Layout()
+	linkTitle := p.LinkTitle()
+	isNode := p.IsNode()
+	isPage := p.IsPage()
+	path := p.Path()
+	slug := p.Slug()
+	lang := p.Lang()
+	isSection := p.IsSection()
+	section := p.Section()
+	sectionsEntries := p.SectionsEntries()
+	sectionsPath := p.SectionsPath()
+	sitemap := p.Sitemap()
+	typ := p.Type()
+	weight := p.Weight()
+	language := p.Language()
+	file := p.File()
+	outputFormats := p.OutputFormats()
+	alternativeOutputFormats := p.AlternativeOutputFormats()
+	menus := p.Menus()
+	translationKey := p.TranslationKey()
+	isTranslated := p.IsTranslated()
+	allTranslations := p.AllTranslations()
+	translations := p.Translations()
+
+	s := struct {
+		Content                  interface{}
+		Plain                    string
+		PlainWords               []string
+		Summary                  template.HTML
+		Truncated                bool
+		FuzzyWordCount           int
+		WordCount                int
+		ReadingTime              int
+		Len                      int
+		TableOfContents          template.HTML
+		RawContent               string
+		MediaType                media.Type
+		ResourceType             string
+		Permalink                string
+		RelPermalink             string
+		Name                     string
+		Title                    string
+		Params                   map[string]interface{}
+		Data                     interface{}
+		Date                     time.Time
+		Lastmod                  time.Time
+		PublishDate              time.Time
+		ExpiryDate               time.Time
+		Aliases                  []string
+		BundleType               string
+		Description              string
+		Draft                    bool
+		IsHome                   bool
+		Keywords                 []string
+		Kind                     string
+		Layout                   string
+		LinkTitle                string
+		IsNode                   bool
+		IsPage                   bool
+		Path                     string
+		Slug                     string
+		Lang                     string
+		IsSection                bool
+		Section                  string
+		SectionsEntries          []string
+		SectionsPath             string
+		Sitemap                  config.Sitemap
+		Type                     string
+		Weight                   int
+		Language                 *langs.Language
+		File                     source.File
+		OutputFormats            OutputFormats
+		AlternativeOutputFormats OutputFormats
+		Menus                    navigation.PageMenus
+		TranslationKey           string
+		IsTranslated             bool
+		AllTranslations          Pages
+		Translations             Pages
+	}{
+		Content:                  content,
+		Plain:                    plain,
+		PlainWords:               plainWords,
+		Summary:                  summary,
+		Truncated:                truncated,
+		FuzzyWordCount:           fuzzyWordCount,
+		WordCount:                wordCount,
+		ReadingTime:              readingTime,
+		Len:                      length,
+		TableOfContents:          tableOfContents,
+		RawContent:               rawContent,
+		MediaType:                mediaType,
+		ResourceType:             resourceType,
+		Permalink:                permalink,
+		RelPermalink:             relPermalink,
+		Name:                     name,
+		Title:                    title,
+		Params:                   params,
+		Data:                     data,
+		Date:                     date,
+		Lastmod:                  lastmod,
+		PublishDate:              publishDate,
+		ExpiryDate:               expiryDate,
+		Aliases:                  aliases,
+		BundleType:               bundleType,
+		Description:              description,
+		Draft:                    draft,
+		IsHome:                   isHome,
+		Keywords:                 keywords,
+		Kind:                     kind,
+		Layout:                   layout,
+		LinkTitle:                linkTitle,
+		IsNode:                   isNode,
+		IsPage:                   isPage,
+		Path:                     path,
+		Slug:                     slug,
+		Lang:                     lang,
+		IsSection:                isSection,
+		Section:                  section,
+		SectionsEntries:          sectionsEntries,
+		SectionsPath:             sectionsPath,
+		Sitemap:                  sitemap,
+		Type:                     typ,
+		Weight:                   weight,
+		Language:                 language,
+		File:                     file,
+		OutputFormats:            outputFormats,
+		AlternativeOutputFormats: alternativeOutputFormats,
+		Menus:                    menus,
+		TranslationKey:           translationKey,
+		IsTranslated:             isTranslated,
+		AllTranslations:          allTranslations,
+		Translations:             translations,
+	}
+
+	return json.Marshal(&s)
+}
diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go
new file mode 100644
index 00000000000..7afbee216c8
--- /dev/null
+++ b/resources/page/page_nop.go
@@ -0,0 +1,463 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+	"html/template"
+	"os"
+	"time"
+
+	"github.com/bep/gitmap"
+	"github.com/gohugoio/hugo/navigation"
+
+	"github.com/gohugoio/hugo/common/hugo"
+	"github.com/gohugoio/hugo/common/maps"
+	"github.com/gohugoio/hugo/source"
+
+	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/related"
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+	NopPage Page = new(nopPage)
+	NilPage *nopPage
+)
+
+// PageNop implements Page, but does nothing.
+type nopPage int
+
+func (p *nopPage) Aliases() []string {
+	return nil
+}
+
+func (p *nopPage) Sitemap() config.Sitemap {
+	return config.Sitemap{}
+}
+
+func (p *nopPage) Layout() string {
+	return ""
+}
+
+func (p *nopPage) RSSLink() template.URL {
+	return ""
+}
+
+func (p *nopPage) Author() Author {
+	return Author{}
+
+}
+func (p *nopPage) Authors() AuthorList {
+	return nil
+}
+
+func (p *nopPage) AllTranslations() Pages {
+	return nil
+}
+
+func (p *nopPage) LanguagePrefix() string {
+	return ""
+}
+
+func (p *nopPage) AlternativeOutputFormats() OutputFormats {
+	return nil
+}
+
+func (p *nopPage) BaseFileName() string {
+	return ""
+}
+
+func (p *nopPage) BundleType() string {
+	return ""
+}
+
+func (p *nopPage) Content() (interface{}, error) {
+	return "", nil
+}
+
+func (p *nopPage) ContentBaseName() string {
+	return ""
+}
+
+func (p *nopPage) CurrentSection() Page {
+	return nil
+}
+
+func (p *nopPage) Data() interface{} {
+	return nil
+}
+
+func (p *nopPage) Date() (t time.Time) {
+	return
+}
+
+func (p *nopPage) Description() string {
+	return ""
+}
+
+func (p *nopPage) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return "", nil
+}
+func (p *nopPage) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return "", nil
+}
+
+func (p *nopPage) Dir() string {
+	return ""
+}
+
+func (p *nopPage) Draft() bool {
+	return false
+}
+
+func (p *nopPage) Eq(other interface{}) bool {
+	return p == other
+}
+
+func (p *nopPage) ExpiryDate() (t time.Time) {
+	return
+}
+
+func (p *nopPage) Ext() string {
+	return ""
+}
+
+func (p *nopPage) Extension() string {
+	return ""
+}
+
+var nilFile *source.FileInfo
+
+func (p *nopPage) File() source.File {
+	return nilFile
+}
+
+func (p *nopPage) FileInfo() os.FileInfo {
+	return nil
+}
+
+func (p *nopPage) Filename() string {
+	return ""
+}
+
+func (p *nopPage) FirstSection() Page {
+	return nil
+}
+
+func (p *nopPage) FuzzyWordCount() int {
+	return 0
+}
+
+func (p *nopPage) GetPage(ref string) (Page, error) {
+	return nil, nil
+}
+
+func (p *nopPage) GetParam(key string) interface{} {
+	return nil
+}
+
+func (p *nopPage) GitInfo() *gitmap.GitInfo {
+	return nil
+}
+
+func (p *nopPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+	return false
+}
+
+func (p *nopPage) HasShortcode(name string) bool {
+	return false
+}
+
+func (p *nopPage) Hugo() (h hugo.Info) {
+	return
+}
+
+func (p *nopPage) InSection(other interface{}) (bool, error) {
+	return false, nil
+}
+
+func (p *nopPage) IsAncestor(other interface{}) (bool, error) {
+	return false, nil
+}
+
+func (p *nopPage) IsDescendant(other interface{}) (bool, error) {
+	return false, nil
+}
+
+func (p *nopPage) IsDraft() bool {
+	return false
+}
+
+func (p *nopPage) IsHome() bool {
+	return false
+}
+
+func (p *nopPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+	return false
+}
+
+func (p *nopPage) IsNode() bool {
+	return false
+}
+
+func (p *nopPage) IsPage() bool {
+	return false
+}
+
+func (p *nopPage) IsSection() bool {
+	return false
+}
+
+func (p *nopPage) IsTranslated() bool {
+	return false
+}
+
+func (p *nopPage) Keywords() []string {
+	return nil
+}
+
+func (p *nopPage) Kind() string {
+	return ""
+}
+
+func (p *nopPage) Lang() string {
+	return ""
+}
+
+func (p *nopPage) Language() *langs.Language {
+	return nil
+}
+
+func (p *nopPage) Lastmod() (t time.Time) {
+	return
+}
+
+func (p *nopPage) Len() int {
+	return 0
+}
+
+func (p *nopPage) LinkTitle() string {
+	return ""
+}
+
+func (p *nopPage) LogicalName() string {
+	return ""
+}
+
+func (p *nopPage) MediaType() (m media.Type) {
+	return
+}
+
+func (p *nopPage) Menus() (m navigation.PageMenus) {
+	return
+}
+
+func (p *nopPage) Name() string {
+	return ""
+}
+
+func (p *nopPage) Next() Page {
+	return nil
+}
+
+func (p *nopPage) OutputFormats() OutputFormats {
+	return nil
+}
+
+func (p *nopPage) Pages() Pages {
+	return nil
+}
+
+func (p *nopPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
+	return nil, nil
+}
+
+func (p *nopPage) Paginator(options ...interface{}) (*Pager, error) {
+	return nil, nil
+}
+
+func (p *nopPage) Param(key interface{}) (interface{}, error) {
+	return nil, nil
+}
+
+func (p *nopPage) Params() map[string]interface{} {
+	return nil
+}
+
+func (p *nopPage) Parent() Page {
+	return nil
+}
+
+func (p *nopPage) Path() string {
+	return ""
+}
+
+func (p *nopPage) Permalink() string {
+	return ""
+}
+
+func (p *nopPage) Plain() string {
+	return ""
+}
+
+func (p *nopPage) PlainWords() []string {
+	return nil
+}
+
+func (p *nopPage) Prev() Page {
+	return nil
+}
+
+func (p *nopPage) PublishDate() (t time.Time) {
+	return
+}
+
+func (p *nopPage) PrevInSection() Page {
+	return nil
+}
+func (p *nopPage) NextInSection() Page {
+	return nil
+}
+
+func (p *nopPage) PrevPage() Page {
+	return nil
+}
+
+func (p *nopPage) NextPage() Page {
+	return nil
+}
+
+func (p *nopPage) RawContent() string {
+	return ""
+}
+
+func (p *nopPage) ReadingTime() int {
+	return 0
+}
+
+func (p *nopPage) Ref(argsm map[string]interface{}) (string, error) {
+	return "", nil
+}
+
+func (p *nopPage) RelPermalink() string {
+	return ""
+}
+
+func (p *nopPage) RelRef(argsm map[string]interface{}) (string, error) {
+	return "", nil
+}
+
+func (p *nopPage) Render(layout ...string) template.HTML {
+	return ""
+}
+
+func (p *nopPage) ResourceType() string {
+	return ""
+}
+
+func (p *nopPage) Resources() resource.Resources {
+	return nil
+}
+
+func (p *nopPage) Scratch() *maps.Scratch {
+	return nil
+}
+
+func (p *nopPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+	return nil, nil
+}
+
+func (p *nopPage) Section() string {
+	return ""
+}
+
+func (p *nopPage) Sections() Pages {
+	return nil
+}
+
+func (p *nopPage) SectionsEntries() []string {
+	return nil
+}
+
+func (p *nopPage) SectionsPath() string {
+	return ""
+}
+
+func (p *nopPage) Site() Site {
+	return nil
+}
+
+func (p *nopPage) Sites() Sites {
+	return nil
+}
+
+func (p *nopPage) Slug() string {
+	return ""
+}
+
+func (p *nopPage) String() string {
+	return "nopPage"
+}
+
+func (p *nopPage) Summary() template.HTML {
+	return ""
+}
+
+func (p *nopPage) TableOfContents() template.HTML {
+	return ""
+}
+
+func (p *nopPage) Title() string {
+	return ""
+}
+
+func (p *nopPage) TranslationBaseName() string {
+	return ""
+}
+
+func (p *nopPage) TranslationKey() string {
+	return ""
+}
+
+func (p *nopPage) Translations() Pages {
+	return nil
+}
+
+func (p *nopPage) Truncated() bool {
+	return false
+}
+
+func (p *nopPage) Type() string {
+	return ""
+}
+
+func (p *nopPage) URL() string {
+	return ""
+}
+
+func (p *nopPage) UniqueID() string {
+	return ""
+}
+
+func (p *nopPage) Weight() int {
+	return 0
+}
+
+func (p *nopPage) WordCount() int {
+	return 0
+}
diff --git a/resources/page/page_outputformat.go b/resources/page/page_outputformat.go
new file mode 100644
index 00000000000..ff4213cc49b
--- /dev/null
+++ b/resources/page/page_outputformat.go
@@ -0,0 +1,85 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+	"strings"
+
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/output"
+)
+
+// OutputFormats holds a list of the relevant output formats for a given page.
+type OutputFormats []OutputFormat
+
+// OutputFormat links to a representation of a resource.
+type OutputFormat struct {
+	// Rel contains a value that can be used to construct a rel link.
+	// This value is fetched from the output format definition.
+	// Note that for pages with only one output format,
+	// this method will always return "canonical".
+	// As an example, the AMP output format will, by default, return "amphtml".
+	//
+	// See:
+	// https://www.ampproject.org/docs/guides/deploy/discovery
+	//
+	// Most other output formats will have "alternate" as value for this.
+	Rel string
+
+	Format output.Format
+
+	relPermalink string
+	permalink    string
+}
+
+// Name returns this OutputFormat's name, i.e. HTML, AMP, JSON etc.
+func (o OutputFormat) Name() string {
+	return o.Format.Name
+}
+
+// MediaType returns this OutputFormat's MediaType (MIME type).
+func (o OutputFormat) MediaType() media.Type {
+	return o.Format.MediaType
+}
+
+// Permalink returns the absolute permalink to this output format.
+func (o OutputFormat) Permalink() string {
+	return o.permalink
+}
+
+// RelPermalink returns the relative permalink to this output format.
+func (o OutputFormat) RelPermalink() string {
+	return o.relPermalink
+}
+
+func NewOutputFormat(relPermalink, permalink string, isCanonical bool, f output.Format) OutputFormat {
+	rel := f.Rel
+	if isCanonical {
+		rel = "canonical"
+	}
+	return OutputFormat{Rel: rel, Format: f, relPermalink: relPermalink, permalink: permalink}
+}
+
+// Get gets an OutputFormat given its name, i.e. json, html etc.
+// It returns nil if none found.
+func (o OutputFormats) Get(name string) *OutputFormat {
+	for _, f := range o {
+		if strings.EqualFold(f.Format.Name, name) {
+			return &f
+		}
+	}
+	return nil
+}
diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go
new file mode 100644
index 00000000000..160c225b12a
--- /dev/null
+++ b/resources/page/page_paths.go
@@ -0,0 +1,334 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"path"
+	"path/filepath"
+
+	"strings"
+
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/output"
+)
+
+const slash = "/"
+
+// TargetPathDescriptor describes how a file path for a given resource
+// should look like on the file system. The same descriptor is then later used to
+// create both the permalinks and the relative links, paginator URLs etc.
+//
+// The big motivation behind this is to have only one source of truth for URLs,
+// and by that also get rid of most of the fragile string parsing/encoding etc.
+//
+//
+type TargetPathDescriptor struct {
+	PathSpec *helpers.PathSpec
+
+	Type output.Format
+	Kind string
+
+	Sections []string
+
+	// For regular content pages this is either
+	// 1) the Slug, if set,
+	// 2) the file base name (TranslationBaseName).
+	BaseName string
+
+	// Source directory.
+	Dir string
+
+	// Typically a language prefix added to file paths.
+	PrefixFilePath string
+
+	// Typically a language prefix added to links.
+	PrefixLink string
+
+	// If in multihost mode etc., every link/path needs to be prefixed, even
+	// if set in URL.
+	ForcePrefix bool
+
+	// URL from front matter if set. Will override any Slug etc.
+	URL string
+
+	// Used to create paginator links.
+	Addends string
+
+	// The expanded permalink if defined for the section, ready to use.
+	ExpandedPermalink string
+
+	// Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
+	UglyURLs bool
+}
+
+// TODO(bep) move this type.
+type TargetPaths struct {
+
+	// Where to store the file on disk relative to the publish dir. OS slashes.
+	TargetFilename string
+
+	// The directory to write sub-resources of the above.
+	SubResourceBaseTarget string
+
+	// The base for creating links to sub-resources of the above.
+	SubResourceBaseLink string
+
+	// The relative permalink to this resource. Unix slashes.
+	Link string
+}
+
+func (p TargetPaths) RelPermalink(s *helpers.PathSpec) string {
+	return s.PrependBasePath(p.Link, false)
+}
+
+func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Format) string {
+	var baseURL string
+	var err error
+	if f.Protocol != "" {
+		baseURL, err = s.BaseURL.WithProtocol(f.Protocol)
+		if err != nil {
+			return ""
+		}
+	} else {
+		baseURL = s.BaseURL.String()
+	}
+
+	return s.PermalinkForBaseURL(p.Link, baseURL)
+}
+
+func isHtmlIndex(s string) bool {
+	return strings.HasSuffix(s, "/index.html")
+}
+
+func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) {
+
+	if d.Type.Name == "" {
+		panic("CreateTargetPath: missing type")
+	}
+
+	// Normalize all file Windows paths to simplify what's next.
+	if helpers.FilePathSeparator != slash {
+		d.Dir = filepath.ToSlash(d.Dir)
+		d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath)
+
+	}
+
+	pagePath := slash
+
+	var (
+		pagePathDir string
+		link        string
+		linkDir     string
+	)
+
+	// The top level index files, i.e. the home page etc., needs
+	// the index base even when uglyURLs is enabled.
+	needsBase := true
+
+	isUgly := d.UglyURLs && !d.Type.NoUgly
+	baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName
+
+	if d.ExpandedPermalink == "" && baseNameSameAsType {
+		isUgly = true
+	}
+
+	if d.Kind != KindPage && d.URL == "" && len(d.Sections) > 0 {
+		if d.ExpandedPermalink != "" {
+			pagePath = pjoin(pagePath, d.ExpandedPermalink)
+		} else {
+			pagePath = pjoin(d.Sections...)
+		}
+		needsBase = false
+	}
+
+	if d.Type.Path != "" {
+		pagePath = pjoin(pagePath, d.Type.Path)
+	}
+
+	if d.Kind != KindHome && d.URL != "" {
+		pagePath = pjoin(pagePath, d.URL)
+
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		pagePathDir = pagePath
+		link = pagePath
+		hasDot := strings.Contains(d.URL, ".")
+		hasSlash := strings.HasSuffix(d.URL, slash)
+
+		if hasSlash || !hasDot {
+			pagePath = pjoin(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
+		} else if hasDot {
+			pagePathDir = path.Dir(pagePathDir)
+		}
+
+		if !isHtmlIndex(pagePath) {
+			link = pagePath
+		} else if !hasSlash {
+			link += slash
+		}
+
+		linkDir = pagePathDir
+
+		if d.ForcePrefix {
+
+			// Prepend language prefix if not already set in URL
+			if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) {
+				pagePath = pjoin(d.PrefixFilePath, pagePath)
+				pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+			}
+
+			if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) {
+				link = pjoin(d.PrefixLink, link)
+				linkDir = pjoin(d.PrefixLink, linkDir)
+			}
+		}
+
+	} else if d.Kind == KindPage {
+
+		if d.ExpandedPermalink != "" {
+			pagePath = pjoin(pagePath, d.ExpandedPermalink)
+
+		} else {
+			if d.Dir != "" {
+				pagePath = pjoin(pagePath, d.Dir)
+			}
+			if d.BaseName != "" {
+				pagePath = pjoin(pagePath, d.BaseName)
+			}
+		}
+
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		link = pagePath
+
+		if baseNameSameAsType {
+			link = strings.TrimSuffix(link, d.BaseName)
+		}
+
+		pagePathDir = link
+		link = link + slash
+		linkDir = pagePathDir
+
+		if isUgly {
+			pagePath = addSuffix(pagePath, d.Type.MediaType.FullSuffix())
+		} else {
+			pagePath = pjoin(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
+		}
+
+		if isUgly && !isHtmlIndex(pagePath) {
+			link = pagePath
+		}
+
+		if d.PrefixFilePath != "" {
+			pagePath = pjoin(d.PrefixFilePath, pagePath)
+			pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+		}
+
+		if d.PrefixLink != "" {
+			link = pjoin(d.PrefixLink, link)
+			linkDir = pjoin(d.PrefixLink, linkDir)
+		}
+
+	} else {
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		needsBase = needsBase && d.Addends == ""
+
+		// No permalink expansion etc. for node type pages (for now)
+		base := ""
+
+		if needsBase || !isUgly {
+			base = d.Type.BaseName
+		}
+
+		pagePathDir = pagePath
+		link = pagePath
+		linkDir = pagePathDir
+
+		if base != "" {
+			pagePath = path.Join(pagePath, addSuffix(base, d.Type.MediaType.FullSuffix()))
+		} else {
+			pagePath = addSuffix(pagePath, d.Type.MediaType.FullSuffix())
+
+		}
+
+		if !isHtmlIndex(pagePath) {
+			link = pagePath
+		} else {
+			link += slash
+		}
+
+		if d.PrefixFilePath != "" {
+			pagePath = pjoin(d.PrefixFilePath, pagePath)
+			pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+		}
+
+		if d.PrefixLink != "" {
+			link = pjoin(d.PrefixLink, link)
+			linkDir = pjoin(d.PrefixLink, linkDir)
+		}
+	}
+
+	pagePath = pjoin(slash, pagePath)
+	pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash)
+
+	hadSlash := strings.HasSuffix(link, slash)
+	link = strings.Trim(link, slash)
+	if hadSlash {
+		link += slash
+	}
+
+	if !strings.HasPrefix(link, slash) {
+		link = slash + link
+	}
+
+	linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash)
+
+	// Note: MakePathSanitized will lower case the path if
+	// disablePathToLower isn't set.
+	pagePath = d.PathSpec.MakePathSanitized(pagePath)
+	pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir)
+	link = d.PathSpec.MakePathSanitized(link)
+	linkDir = d.PathSpec.MakePathSanitized(linkDir)
+
+	tp.TargetFilename = filepath.FromSlash(pagePath)
+	tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir)
+	tp.SubResourceBaseLink = linkDir
+	tp.Link = d.PathSpec.URLizeFilename(link)
+	if tp.Link == "" {
+		tp.Link = slash
+	}
+
+	return
+}
+
+func addSuffix(s, suffix string) string {
+	return strings.Trim(s, slash) + suffix
+}
+
+// Like path.Join, but preserves one trailing slash if present.
+func pjoin(elem ...string) string {
+	hadSlash := strings.HasSuffix(elem[len(elem)-1], slash)
+	joined := path.Join(elem...)
+	if hadSlash && !strings.HasSuffix(joined, slash) {
+		return joined + slash
+	}
+	return joined
+}
diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go
new file mode 100644
index 00000000000..4aaa41e8acc
--- /dev/null
+++ b/resources/page/page_paths_test.go
@@ -0,0 +1,258 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"github.com/gohugoio/hugo/media"
+
+	"fmt"
+
+	"github.com/gohugoio/hugo/output"
+)
+
+// TestPageTargetPath verifies CreateTargetPaths across the matrix of
+// page kinds and output formats, combined with ugly/pretty URLs and
+// language prefixes for both file paths and links.
+func TestPageTargetPath(t *testing.T) {
+
+	pathSpec := newTestPathSpec()
+
+	// A media type with neither a suffix nor a delimiter, for
+	// extensionless output files.
+	noExtNoDelimMediaType := media.TextType
+	noExtNoDelimMediaType.Suffixes = []string{}
+	noExtNoDelimMediaType.Delimiter = ""
+
+	// Netlify style _redirects
+	noExtDelimFormat := output.Format{
+		Name:      "NER",
+		MediaType: noExtNoDelimMediaType,
+		BaseName:  "_redirects",
+	}
+
+	for _, langPrefixPath := range []string{"", "no"} {
+		for _, langPrefixLink := range []string{"", "no"} {
+			for _, uglyURLs := range []bool{false, true} {
+
+				// The expectations below assume pretty URLs and no
+				// prefixes; they are adjusted per variant inside t.Run.
+				tests := []struct {
+					name     string
+					d        TargetPathDescriptor
+					expected TargetPaths
+				}{
+					{"JSON home", TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}},
+					{"AMP home", TargetPathDescriptor{Kind: KindHome, Type: output.AMPFormat}, TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}},
+					{"HTML home", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}},
+					{"Netlify redirects", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: noExtDelimFormat}, TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}},
+					{"HTML section list", TargetPathDescriptor{
+						Kind:     KindSection,
+						Sections: []string{"sect1"},
+						BaseName: "_index",
+						Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}},
+					{"HTML taxonomy list", TargetPathDescriptor{
+						Kind:     KindTaxonomy,
+						Sections: []string{"tags", "hugo"},
+						BaseName: "_index",
+						Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}},
+					{"HTML taxonomy term", TargetPathDescriptor{
+						Kind:     KindTaxonomy,
+						Sections: []string{"tags"},
+						BaseName: "_index",
+						Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}},
+					{
+						"HTML page", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/a/b",
+							BaseName: "mypage",
+							Sections: []string{"a"},
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"}},
+
+					{
+						"HTML page with index as base", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/a/b",
+							BaseName: "index",
+							Sections: []string{"a"},
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"}},
+
+					{
+						"HTML page with special chars", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/a/b",
+							BaseName: "My Page!",
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"}},
+					{"RSS home", TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}},
+					{"RSS section list", TargetPathDescriptor{
+						Kind:     "rss",
+						Sections: []string{"sect1"},
+						Type:     output.RSSFormat}, TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}},
+					{
+						"AMP page", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/a/b/c",
+							BaseName: "myamp",
+							Type:     output.AMPFormat}, TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"}},
+					{
+						"AMP page with URL with suffix", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/sect/",
+							BaseName: "mypage",
+							URL:      "/some/other/url.xhtml",
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"}},
+					{
+						"JSON page with URL without suffix", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/sect/",
+							BaseName: "mypage",
+							URL:      "/some/other/path/",
+							Type:     output.JSONFormat}, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}},
+					{
+						"JSON page with URL without suffix and no trailing slash", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/sect/",
+							BaseName: "mypage",
+							URL:      "/some/other/path",
+							Type:     output.JSONFormat}, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}},
+					{
+						"HTML page with URL without suffix and no trailing slash", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/sect/",
+							BaseName: "mypage",
+							URL:      "/some/other/path",
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"}},
+					{
+						"HTML page with expanded permalink", TargetPathDescriptor{
+							Kind:              KindPage,
+							Dir:               "/a/b",
+							BaseName:          "mypage",
+							ExpandedPermalink: "/2017/10/my-title/",
+							Type:              output.HTMLFormat}, TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"}},
+					{
+						"Paginated HTML home", TargetPathDescriptor{
+							Kind:     KindHome,
+							BaseName: "_index",
+							Type:     output.HTMLFormat,
+							Addends:  "page/3"}, TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"}},
+					{
+						"Paginated Taxonomy list", TargetPathDescriptor{
+							Kind:     KindTaxonomy,
+							BaseName: "_index",
+							Sections: []string{"tags", "hugo"},
+							Type:     output.HTMLFormat,
+							Addends:  "page/3"}, TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"}},
+					{
+						"Regular page with addend", TargetPathDescriptor{
+							Kind:     KindPage,
+							Dir:      "/a/b",
+							BaseName: "mypage",
+							Addends:  "c/d/e",
+							Type:     output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"}},
+				}
+
+				for i, test := range tests {
+					t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name),
+						func(t *testing.T) {
+
+							test.d.ForcePrefix = true
+							test.d.PathSpec = pathSpec
+							test.d.UglyURLs = uglyURLs
+							test.d.PrefixFilePath = langPrefixPath
+							test.d.PrefixLink = langPrefixLink
+							test.d.Dir = filepath.FromSlash(test.d.Dir)
+							isUgly := uglyURLs && !test.d.Type.NoUgly
+
+							expected := test.expected
+
+							// Rewrite the pretty-URL expectations for the ugly-URL
+							// variant: "/base/index.suffix" collapses to
+							// "/base.suffix", except for pages whose base name
+							// matches the format's base name and homes with a
+							// fixed output path.
+							// TODO(bep) simplify
+							if test.d.Kind == KindPage && test.d.BaseName == test.d.Type.BaseName {
+							} else if test.d.Kind == KindHome && test.d.Type.Path != "" {
+							} else if test.d.Type.MediaType.Suffix() != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
+								expected.TargetFilename = strings.Replace(expected.TargetFilename,
+									"/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.Suffix(),
+									"."+test.d.Type.MediaType.Suffix(), 1)
+								expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.Suffix()
+
+							}
+
+							// Prefixes are not re-applied when the URL already
+							// carries them.
+							if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) {
+								expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename
+								expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget
+							}
+
+							if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) {
+								expected.Link = "/" + test.d.PrefixLink + expected.Link
+							}
+
+							// Filenames are OS-dependent; links are not.
+							expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+							expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+							pagePath := CreateTargetPaths(test.d)
+
+							if !eqTargetPaths(pagePath, expected) {
+								t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+
+							}
+						})
+				}
+			}
+
+		}
+	}
+}
+
+// TestPageTargetPathPrefix verifies that PrefixFilePath and PrefixLink
+// are only applied to a page with a fixed URL when ForcePrefix is set.
+func TestPageTargetPathPrefix(t *testing.T) {
+	pathSpec := newTestPathSpec()
+	tests := []struct {
+		name     string
+		d        TargetPathDescriptor
+		expected TargetPaths
+	}{
+		{"URL set, prefix both, no force", TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"},
+			TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"}},
+		{"URL set, prefix both, force", TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"},
+			TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"}},
+	}
+
+	for i, test := range tests {
+		// Was t.Run(fmt.Sprintf(test.name), ...): a non-constant format
+		// string with no arguments, which go vet flags and which would
+		// mangle any test name containing '%'. Pass the name directly.
+		t.Run(test.name,
+			func(t *testing.T) {
+				test.d.PathSpec = pathSpec
+				expected := test.expected
+				// Filenames are OS-dependent; links are not.
+				expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+				expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+				pagePath := CreateTargetPaths(test.d)
+
+				if pagePath != expected {
+					t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+				}
+			})
+	}
+
+}
+
+// eqTargetPaths reports whether two TargetPaths values agree on Link,
+// SubResourceBaseTarget and TargetFilename. SubResourceBaseLink is
+// not part of the comparison.
+func eqTargetPaths(p1, p2 TargetPaths) bool {
+	return p1.Link == p2.Link &&
+		p1.SubResourceBaseTarget == p2.SubResourceBaseTarget &&
+		p1.TargetFilename == p2.TargetFilename
+}
diff --git a/resources/page/page_wrappers.autogen.go b/resources/page/page_wrappers.autogen.go
new file mode 100644
index 00000000000..c08da3e8bb6
--- /dev/null
+++ b/resources/page/page_wrappers.autogen.go
@@ -0,0 +1,97 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+	"github.com/gohugoio/hugo/common/hugo"
+	"github.com/gohugoio/hugo/helpers"
+	"html/template"
+	"os"
+)
+
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+// Every wrapped method logs a notice via helpers.Deprecated before
+// delegating to p.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+	return &pageDeprecated{p: p}
+}
+
+// pageDeprecated decorates DeprecatedWarningPageMethods with
+// deprecation warnings; see NewDeprecatedWarningPage.
+type pageDeprecated struct {
+	p DeprecatedWarningPageMethods
+}
+
+// The methods below each emit a deprecation warning and then delegate
+// to the wrapped implementation. This file is autogenerated: update
+// the generator rather than editing these by hand.
+func (p *pageDeprecated) Filename() string {
+	helpers.Deprecated("Page", ".Filename", "Use .File.Filename", false)
+	return p.p.Filename()
+}
+func (p *pageDeprecated) Dir() string {
+	helpers.Deprecated("Page", ".Dir", "Use .File.Dir", false)
+	return p.p.Dir()
+}
+func (p *pageDeprecated) IsDraft() bool {
+	helpers.Deprecated("Page", ".IsDraft", "Use .Draft.", false)
+	return p.p.IsDraft()
+}
+func (p *pageDeprecated) Extension() string {
+	helpers.Deprecated("Page", ".Extension", "Use .File.Extension", false)
+	return p.p.Extension()
+}
+func (p *pageDeprecated) Hugo() hugo.Info {
+	helpers.Deprecated("Page", ".Hugo", "Use the global hugo function.", false)
+	return p.p.Hugo()
+}
+func (p *pageDeprecated) Ext() string {
+	helpers.Deprecated("Page", ".Ext", "Use .File.Ext", false)
+	return p.p.Ext()
+}
+func (p *pageDeprecated) LanguagePrefix() string {
+	helpers.Deprecated("Page", ".LanguagePrefix", "Use .Site.LanguagePrefix.", false)
+	return p.p.LanguagePrefix()
+}
+func (p *pageDeprecated) GetParam(arg0 string) interface{} {
+	helpers.Deprecated("Page", ".GetParam", "Use .Param or .Params.myParam.", false)
+	return p.p.GetParam(arg0)
+}
+func (p *pageDeprecated) LogicalName() string {
+	helpers.Deprecated("Page", ".LogicalName", "Use .File.LogicalName", false)
+	return p.p.LogicalName()
+}
+func (p *pageDeprecated) BaseFileName() string {
+	helpers.Deprecated("Page", ".BaseFileName", "Use .File.BaseFileName", false)
+	return p.p.BaseFileName()
+}
+func (p *pageDeprecated) RSSLink() template.URL {
+	helpers.Deprecated("Page", ".RSSLink", "Use the Output Format's link, e.g. something like: \n    {{ with .OutputFormats.Get \"RSS\" }}{{ . RelPermalink }}{{ end }}", false)
+	return p.p.RSSLink()
+}
+func (p *pageDeprecated) TranslationBaseName() string {
+	helpers.Deprecated("Page", ".TranslationBaseName", "Use .File.TranslationBaseName", false)
+	return p.p.TranslationBaseName()
+}
+func (p *pageDeprecated) URL() string {
+	helpers.Deprecated("Page", ".URL", "Use .Permalink or .RelPermalink. If what you want is the front matter URL value, use .Params.url", false)
+	return p.p.URL()
+}
+func (p *pageDeprecated) ContentBaseName() string {
+	helpers.Deprecated("Page", ".ContentBaseName", "Use .File.ContentBaseName", false)
+	return p.p.ContentBaseName()
+}
+func (p *pageDeprecated) UniqueID() string {
+	helpers.Deprecated("Page", ".UniqueID", "Use .File.UniqueID", false)
+	return p.p.UniqueID()
+}
+func (p *pageDeprecated) FileInfo() os.FileInfo {
+	helpers.Deprecated("Page", ".FileInfo", "Use .File.FileInfo", false)
+	return p.p.FileInfo()
+}
diff --git a/hugolib/pageGroup.go b/resources/page/pagegroup.go
similarity index 71%
rename from hugolib/pageGroup.go
rename to resources/page/pagegroup.go
index 8aaa1018c94..46d9bd17444 100644
--- a/hugolib/pageGroup.go
+++ b/resources/page/pagegroup.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,14 +11,23 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"errors"
+	"fmt"
 	"reflect"
 	"sort"
 	"strings"
 	"time"
+
+	"github.com/gohugoio/hugo/common/collections"
+
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+	_ collections.Slicer = PageGroup{}
 )
 
 // PageGroup represents a group of pages, grouped by the key.
@@ -80,7 +89,8 @@ func (p PagesGroup) Reverse() PagesGroup {
 
 var (
 	errorType   = reflect.TypeOf((*error)(nil)).Elem()
-	pagePtrType = reflect.TypeOf((*Page)(nil))
+	pagePtrType = reflect.TypeOf((*Page)(nil)).Elem()
+	pagesType   = reflect.TypeOf(Pages{})
 )
 
 // GroupBy groups by the value in the given field or method name and with the given order.
@@ -99,7 +109,7 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
 	var ft interface{}
 	m, ok := pagePtrType.MethodByName(key)
 	if ok {
-		if m.Type.NumIn() != 1 || m.Type.NumOut() == 0 || m.Type.NumOut() > 2 {
+		if m.Type.NumOut() == 0 || m.Type.NumOut() > 2 {
 			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
 		}
 		if m.Type.NumOut() == 1 && m.Type.Out(0).Implements(errorType) {
@@ -119,9 +129,9 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
 	var tmp reflect.Value
 	switch e := ft.(type) {
 	case reflect.StructField:
-		tmp = reflect.MakeMap(reflect.MapOf(e.Type, reflect.SliceOf(pagePtrType)))
+		tmp = reflect.MakeMap(reflect.MapOf(e.Type, pagesType))
 	case reflect.Method:
-		tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), reflect.SliceOf(pagePtrType)))
+		tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), pagesType))
 	}
 
 	for _, e := range p {
@@ -137,7 +147,7 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
 			continue
 		}
 		if !tmp.MapIndex(fv).IsValid() {
-			tmp.SetMapIndex(fv, reflect.MakeSlice(reflect.SliceOf(pagePtrType), 0, 0))
+			tmp.SetMapIndex(fv, reflect.MakeSlice(pagesType, 0, 0))
 		}
 		tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
 	}
@@ -145,7 +155,7 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
 	sortedKeys := sortKeys(tmp.MapKeys(), direction)
 	r := make([]PageGroup, len(sortedKeys))
 	for i, k := range sortedKeys {
-		r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().([]*Page)}
+		r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)}
 	}
 
 	return r, nil
@@ -167,40 +177,41 @@ func (p Pages) GroupByParam(key string, order ...string) (PagesGroup, error) {
 	var tmp reflect.Value
 	var keyt reflect.Type
 	for _, e := range p {
-		param := e.getParamToLower(key)
+		param := resource.GetParamToLower(e, key)
 		if param != nil {
 			if _, ok := param.([]string); !ok {
 				keyt = reflect.TypeOf(param)
-				tmp = reflect.MakeMap(reflect.MapOf(keyt, reflect.SliceOf(pagePtrType)))
+				tmp = reflect.MakeMap(reflect.MapOf(keyt, pagesType))
 				break
 			}
 		}
 	}
 	if !tmp.IsValid() {
-		return nil, errors.New("There is no such a param")
+		return nil, errors.New("there is no such a param")
 	}
 
 	for _, e := range p {
-		param := e.getParam(key, false)
+		param := resource.GetParam(e, key)
+
 		if param == nil || reflect.TypeOf(param) != keyt {
 			continue
 		}
 		v := reflect.ValueOf(param)
 		if !tmp.MapIndex(v).IsValid() {
-			tmp.SetMapIndex(v, reflect.MakeSlice(reflect.SliceOf(pagePtrType), 0, 0))
+			tmp.SetMapIndex(v, reflect.MakeSlice(pagesType, 0, 0))
 		}
 		tmp.SetMapIndex(v, reflect.Append(tmp.MapIndex(v), reflect.ValueOf(e)))
 	}
 
 	var r []PageGroup
 	for _, k := range sortKeys(tmp.MapKeys(), direction) {
-		r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().([]*Page)})
+		r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)})
 	}
 
 	return r, nil
 }
 
-func (p Pages) groupByDateField(sorter func(p Pages) Pages, formatter func(p *Page) string, order ...string) (PagesGroup, error) {
+func (p Pages) groupByDateField(sorter func(p Pages) Pages, formatter func(p Page) string, order ...string) (PagesGroup, error) {
 	if len(p) < 1 {
 		return nil, nil
 	}
@@ -211,14 +222,14 @@ func (p Pages) groupByDateField(sorter func(p Pages) Pages, formatter func(p *Pa
 		sp = sp.Reverse()
 	}
 
-	date := formatter(sp[0])
+	date := formatter(sp[0].(Page))
 	var r []PageGroup
 	r = append(r, PageGroup{Key: date, Pages: make(Pages, 0)})
 	r[0].Pages = append(r[0].Pages, sp[0])
 
 	i := 0
 	for _, e := range sp[1:] {
-		date = formatter(e)
+		date = formatter(e.(Page))
 		if r[i].Key.(string) != date {
 			r = append(r, PageGroup{Key: date})
 			i++
@@ -236,8 +247,8 @@ func (p Pages) GroupByDate(format string, order ...string) (PagesGroup, error) {
 	sorter := func(p Pages) Pages {
 		return p.ByDate()
 	}
-	formatter := func(p *Page) string {
-		return p.Date.Format(format)
+	formatter := func(p Page) string {
+		return p.Date().Format(format)
 	}
 	return p.groupByDateField(sorter, formatter, order...)
 }
@@ -250,8 +261,8 @@ func (p Pages) GroupByPublishDate(format string, order ...string) (PagesGroup, e
 	sorter := func(p Pages) Pages {
 		return p.ByPublishDate()
 	}
-	formatter := func(p *Page) string {
-		return p.PublishDate.Format(format)
+	formatter := func(p Page) string {
+		return p.PublishDate().Format(format)
 	}
 	return p.groupByDateField(sorter, formatter, order...)
 }
@@ -264,8 +275,8 @@ func (p Pages) GroupByExpiryDate(format string, order ...string) (PagesGroup, er
 	sorter := func(p Pages) Pages {
 		return p.ByExpiryDate()
 	}
-	formatter := func(p *Page) string {
-		return p.ExpiryDate.Format(format)
+	formatter := func(p Page) string {
+		return p.ExpiryDate().Format(format)
 	}
 	return p.groupByDateField(sorter, formatter, order...)
 }
@@ -278,21 +289,81 @@ func (p Pages) GroupByParamDate(key string, format string, order ...string) (Pag
 	sorter := func(p Pages) Pages {
 		var r Pages
 		for _, e := range p {
-			param := e.getParamToLower(key)
-			if param != nil {
-				if _, ok := param.(time.Time); ok {
-					r = append(r, e)
-				}
+			param := resource.GetParamToLower(e, key)
+			if _, ok := param.(time.Time); ok {
+				r = append(r, e)
 			}
 		}
-		pdate := func(p1, p2 *Page) bool {
-			return p1.getParamToLower(key).(time.Time).Unix() < p2.getParamToLower(key).(time.Time).Unix()
+		pdate := func(p1, p2 Page) bool {
+			p1p, p2p := p1.(Page), p2.(Page)
+			return resource.GetParamToLower(p1p, key).(time.Time).Unix() < resource.GetParamToLower(p2p, key).(time.Time).Unix()
 		}
 		pageBy(pdate).Sort(r)
 		return r
 	}
-	formatter := func(p *Page) string {
-		return p.getParamToLower(key).(time.Time).Format(format)
+	formatter := func(p Page) string {
+		return resource.GetParamToLower(p, key).(time.Time).Format(format)
 	}
 	return p.groupByDateField(sorter, formatter, order...)
 }
+
+// Slice is not meant to be used externally. It's a bridge function
+// for the template functions. See collections.Slice.
+func (p PageGroup) Slice(in interface{}) (interface{}, error) {
+	switch v := in.(type) {
+	case PageGroup:
+		// Already the right type; pass it through unchanged.
+		return v, nil
+	case []interface{}:
+		result := make(PagesGroup, len(v))
+		for i, item := range v {
+			pg, ok := item.(PageGroup)
+			if !ok {
+				return nil, fmt.Errorf("type %T is not a PageGroup", item)
+			}
+			result[i] = pg
+		}
+		return result, nil
+	default:
+		return nil, fmt.Errorf("invalid slice type %T", v)
+	}
+}
+
+// Len returns the total number of pages across all groups.
+func (psg PagesGroup) Len() int {
+	total := 0
+	for _, group := range psg {
+		total += len(group.Pages)
+	}
+	return total
+}
+
+// ToPagesGroup tries to convert seq into a PagesGroup.
+// Supported inputs are nil, PagesGroup, []PageGroup and a
+// []interface{} whose first element is a PageGroup. Any other input
+// (including an empty []interface{}) falls through to (nil, nil)
+// rather than an error; an error is only returned for a
+// []interface{} that starts with a PageGroup but contains a
+// mismatched element.
+func ToPagesGroup(seq interface{}) (PagesGroup, error) {
+	switch v := seq.(type) {
+	case nil:
+		return nil, nil
+	case PagesGroup:
+		return v, nil
+	case []PageGroup:
+		return PagesGroup(v), nil
+	case []interface{}:
+		l := len(v)
+		if l == 0 {
+			break
+		}
+		// Sniff the first element; only homogeneous PageGroup slices
+		// are converted.
+		switch v[0].(type) {
+		case PageGroup:
+			pagesGroup := make(PagesGroup, l)
+			for i, ipg := range v {
+				if pg, ok := ipg.(PageGroup); ok {
+					pagesGroup[i] = pg
+				} else {
+					return nil, fmt.Errorf("unsupported type in paginate from slice, got %T instead of PageGroup", ipg)
+				}
+			}
+			return pagesGroup, nil
+		}
+	}
+
+	return nil, nil
+}
diff --git a/hugolib/pageGroup_test.go b/resources/page/pagegroup_test.go
similarity index 83%
rename from hugolib/pageGroup_test.go
rename to resources/page/pagegroup_test.go
index febcb3c1c0d..51ac090345d 100644
--- a/hugolib/pageGroup_test.go
+++ b/resources/page/pagegroup_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,15 +11,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
-	"errors"
-	"path/filepath"
 	"reflect"
+	"strings"
 	"testing"
 
 	"github.com/spf13/cast"
+	"github.com/stretchr/testify/require"
 )
 
 type pageGroupTestObject struct {
@@ -38,17 +38,17 @@ var pageGroupTestSources = []pageGroupTestObject{
 }
 
 func preparePageGroupTestPages(t *testing.T) Pages {
-	s := newTestSite(t)
 	var pages Pages
 	for _, src := range pageGroupTestSources {
-		p, err := s.NewPage(filepath.FromSlash(src.path))
-		if err != nil {
-			t.Fatalf("failed to prepare test page %s", src.path)
+		p := newTestPage()
+		p.path = src.path
+		if p.path != "" {
+			p.section = strings.Split(strings.TrimPrefix(p.path, "/"), "/")[0]
 		}
-		p.Weight = src.weight
-		p.Date = cast.ToTime(src.date)
-		p.PublishDate = cast.ToTime(src.date)
-		p.ExpiryDate = cast.ToTime(src.date)
+		p.weight = src.weight
+		p.date = cast.ToTime(src.date)
+		p.pubDate = cast.ToTime(src.date)
+		p.expiryDate = cast.ToTime(src.date)
 		p.params["custom_param"] = src.param
 		p.params["custom_date"] = cast.ToTime(src.date)
 		pages = append(pages, p)
@@ -104,7 +104,7 @@ func TestGroupByWithSectionArg(t *testing.T) {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
 	if !reflect.DeepEqual(groups, expect) {
-		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+		t.Errorf("PagesGroup has unexpected groups. It should be\n%#v, got\n%#v", expect, groups)
 	}
 }
 
@@ -138,52 +138,10 @@ func TestGroupByCalledWithEmptyPages(t *testing.T) {
 	}
 }
 
-func TestGroupByCalledWithUnavailableKey(t *testing.T) {
+func TestGroupByParamCalledWithUnavailableKey(t *testing.T) {
 	t.Parallel()
 	pages := preparePageGroupTestPages(t)
-	_, err := pages.GroupBy("UnavailableKey")
-	if err == nil {
-		t.Errorf("GroupByParam should return an error but didn't")
-	}
-}
-
-func (page *Page) DummyPageMethodWithArgForTest(s string) string {
-	return s
-}
-
-func (page *Page) DummyPageMethodReturnThreeValueForTest() (string, string, string) {
-	return "foo", "bar", "baz"
-}
-
-func (page *Page) DummyPageMethodReturnErrorOnlyForTest() error {
-	return errors.New("some error occurred")
-}
-
-func (page *Page) dummyPageMethodReturnTwoValueForTest() (string, string) {
-	return "foo", "bar"
-}
-
-func TestGroupByCalledWithInvalidMethod(t *testing.T) {
-	t.Parallel()
-	var err error
-	pages := preparePageGroupTestPages(t)
-
-	_, err = pages.GroupBy("DummyPageMethodWithArgForTest")
-	if err == nil {
-		t.Errorf("GroupByParam should return an error but didn't")
-	}
-
-	_, err = pages.GroupBy("DummyPageMethodReturnThreeValueForTest")
-	if err == nil {
-		t.Errorf("GroupByParam should return an error but didn't")
-	}
-
-	_, err = pages.GroupBy("DummyPageMethodReturnErrorOnlyForTest")
-	if err == nil {
-		t.Errorf("GroupByParam should return an error but didn't")
-	}
-
-	_, err = pages.GroupBy("DummyPageMethodReturnTwoValueForTest")
+	_, err := pages.GroupByParam("UnavailableKey")
 	if err == nil {
 		t.Errorf("GroupByParam should return an error but didn't")
 	}
@@ -246,31 +204,25 @@ func TestGroupByParamInReverseOrder(t *testing.T) {
 }
 
 func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
+	assert := require.New(t)
 	testStr := "TestString"
-	f := "/section1/test_capital.md"
-	s := newTestSite(t)
-	p, err := s.NewPage(filepath.FromSlash(f))
-	if err != nil {
-		t.Fatalf("failed to prepare test page %s", f)
-	}
+	p := newTestPage()
 	p.params["custom_param"] = testStr
 	pages := Pages{p}
 
 	groups, err := pages.GroupByParam("custom_param")
-	if err != nil {
-		t.Fatalf("Unable to make PagesGroup array: %s", err)
-	}
-	if groups[0].Key != testStr {
-		t.Errorf("PagesGroup key is converted to a lower character string. It should be %#v, got %#v", testStr, groups[0].Key)
-	}
+
+	assert.NoError(err)
+	assert.Equal(testStr, groups[0].Key)
+
 }
 
 func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
 	t.Parallel()
 	pages := preparePageGroupTestPages(t)
-	delete(pages[1].params, "custom_param")
-	delete(pages[3].params, "custom_param")
-	delete(pages[4].params, "custom_param")
+	delete(pages[1].Params(), "custom_param")
+	delete(pages[3].Params(), "custom_param")
+	delete(pages[4].Params(), "custom_param")
 
 	expect := PagesGroup{
 		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
diff --git a/hugolib/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
similarity index 97%
rename from hugolib/pagemeta/page_frontmatter.go
rename to resources/page/pagemeta/page_frontmatter.go
index b67ffbc05a0..1ce3fbee4fc 100644
--- a/hugolib/pagemeta/page_frontmatter.go
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@ import (
 
 	"github.com/gohugoio/hugo/common/loggers"
 	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/resource"
 
 	"github.com/gohugoio/hugo/config"
 	"github.com/spf13/cast"
@@ -63,7 +64,7 @@ type FrontMatterDescriptor struct {
 	Params map[string]interface{}
 
 	// This is the Page's dates.
-	Dates *PageDates
+	Dates *resource.Dates
 
 	// This is the Page's Slug etc.
 	PageURLs *URLPath
@@ -264,7 +265,7 @@ func toLowerSlice(in interface{}) []string {
 func NewFrontmatterHandler(logger *loggers.Logger, cfg config.Provider) (FrontMatterHandler, error) {
 
 	if logger == nil {
-		logger = loggers.NewWarningLogger()
+		logger = loggers.NewErrorLogger()
 	}
 
 	frontMatterConfig, err := newFrontmatterConfig(cfg)
@@ -300,7 +301,7 @@ func (f *FrontMatterHandler) createHandlers() error {
 
 	if f.dateHandler, err = f.createDateHandler(f.fmConfig.date,
 		func(d *FrontMatterDescriptor, t time.Time) {
-			d.Dates.Date = t
+			d.Dates.FDate = t
 			setParamIfNotSet(fmDate, t, d)
 		}); err != nil {
 		return err
@@ -309,7 +310,7 @@ func (f *FrontMatterHandler) createHandlers() error {
 	if f.lastModHandler, err = f.createDateHandler(f.fmConfig.lastmod,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmLastmod, t, d)
-			d.Dates.Lastmod = t
+			d.Dates.FLastmod = t
 		}); err != nil {
 		return err
 	}
@@ -317,7 +318,7 @@ func (f *FrontMatterHandler) createHandlers() error {
 	if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.publishDate,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmPubDate, t, d)
-			d.Dates.PublishDate = t
+			d.Dates.FPublishDate = t
 		}); err != nil {
 		return err
 	}
@@ -325,7 +326,7 @@ func (f *FrontMatterHandler) createHandlers() error {
 	if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.expiryDate,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmExpiryDate, t, d)
-			d.Dates.ExpiryDate = t
+			d.Dates.FExpiryDate = t
 		}); err != nil {
 		return err
 	}
diff --git a/hugolib/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go
similarity index 88%
rename from hugolib/pagemeta/page_frontmatter_test.go
rename to resources/page/pagemeta/page_frontmatter_test.go
index 03f4c2f84a4..313f704d992 100644
--- a/hugolib/pagemeta/page_frontmatter_test.go
+++ b/resources/page/pagemeta/page_frontmatter_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/viper"
 
 	"github.com/stretchr/testify/require"
@@ -50,13 +51,13 @@ func TestDateAndSlugFromBaseFilename(t *testing.T) {
 	}
 
 	for i, test := range tests {
-		expectedDate, err := time.Parse("2006-01-02", test.date)
+		expectedDate, err := time.Parse("2006-01-02", test.date)
 		assert.NoError(err)
 
 		errMsg := fmt.Sprintf("Test %d", i)
 		gotDate, gotSlug := dateAndSlugFromBaseFilename(test.name)
 
-		assert.Equal(expectedDate, gotDate, errMsg)
+		assert.Equal(expectedDate, gotDate, errMsg)
 		assert.Equal(test.slug, gotSlug, errMsg)
 
 	}
@@ -66,7 +67,7 @@ func newTestFd() *FrontMatterDescriptor {
 	return &FrontMatterDescriptor{
 		Frontmatter: make(map[string]interface{}),
 		Params:      make(map[string]interface{}),
-		Dates:       &PageDates{},
+		Dates:       &resource.Dates{},
 		PageURLs:    &URLPath{},
 	}
 }
@@ -143,13 +144,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) {
 		}
 		d.Frontmatter["date"] = d2
 		assert.NoError(handler.HandleDates(d))
-		assert.Equal(d1, d.Dates.Date)
+		assert.Equal(d1, d.Dates.FDate)
 		assert.Equal(d2, d.Params["date"])
 
 		d = newTestFd()
 		d.Frontmatter["date"] = d2
 		assert.NoError(handler.HandleDates(d))
-		assert.Equal(d2, d.Dates.Date)
+		assert.Equal(d2, d.Dates.FDate)
 		assert.Equal(d2, d.Params["date"])
 
 	}
@@ -186,15 +187,15 @@ func TestFrontMatterDatesCustomConfig(t *testing.T) {
 
 	assert.NoError(handler.HandleDates(d))
 
-	assert.Equal(1, d.Dates.Date.Day())
-	assert.Equal(4, d.Dates.Lastmod.Day())
-	assert.Equal(4, d.Dates.PublishDate.Day())
-	assert.Equal(5, d.Dates.ExpiryDate.Day())
+	assert.Equal(1, d.Dates.FDate.Day())
+	assert.Equal(4, d.Dates.FLastmod.Day())
+	assert.Equal(4, d.Dates.FPublishDate.Day())
+	assert.Equal(5, d.Dates.FExpiryDate.Day())
 
-	assert.Equal(d.Dates.Date, d.Params["date"])
-	assert.Equal(d.Dates.Date, d.Params["mydate"])
-	assert.Equal(d.Dates.PublishDate, d.Params["publishdate"])
-	assert.Equal(d.Dates.ExpiryDate, d.Params["expirydate"])
+	assert.Equal(d.Dates.FDate, d.Params["date"])
+	assert.Equal(d.Dates.FDate, d.Params["mydate"])
+	assert.Equal(d.Dates.FPublishDate, d.Params["publishdate"])
+	assert.Equal(d.Dates.FExpiryDate, d.Params["expirydate"])
 
 	assert.False(handler.IsDateKey("date")) // This looks odd, but is configured like this.
 	assert.True(handler.IsDateKey("mydate"))
@@ -227,10 +228,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
 
 	assert.NoError(handler.HandleDates(d))
 
-	assert.Equal(1, d.Dates.Date.Day())
-	assert.Equal(2, d.Dates.Lastmod.Day())
-	assert.Equal(4, d.Dates.PublishDate.Day())
-	assert.True(d.Dates.ExpiryDate.IsZero())
+	assert.Equal(1, d.Dates.FDate.Day())
+	assert.Equal(2, d.Dates.FLastmod.Day())
+	assert.Equal(4, d.Dates.FPublishDate.Day())
+	assert.True(d.Dates.FExpiryDate.IsZero())
 
 }
 
@@ -252,10 +253,10 @@ func TestFrontMatterDateFieldHandler(t *testing.T) {
 	fd := newTestFd()
 	d, _ := time.Parse("2006-01-02", "2018-02-01")
 	fd.Frontmatter["date"] = d
-	h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.Date = t })
+	h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.FDate = t })
 
 	handled, err := h(fd)
 	assert.True(handled)
 	assert.NoError(err)
-	assert.Equal(d, fd.Dates.Date)
+	assert.Equal(d, fd.Dates.FDate)
 }
diff --git a/hugolib/pagemeta/pagemeta.go b/resources/page/pagemeta/pagemeta.go
similarity index 77%
rename from hugolib/pagemeta/pagemeta.go
rename to resources/page/pagemeta/pagemeta.go
index 93dc9a12f0b..07e5c5673b9 100644
--- a/hugolib/pagemeta/pagemeta.go
+++ b/resources/page/pagemeta/pagemeta.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -13,20 +13,9 @@
 
 package pagemeta
 
-import (
-	"time"
-)
-
 type URLPath struct {
 	URL       string
 	Permalink string
 	Slug      string
 	Section   string
 }
-
-type PageDates struct {
-	Date        time.Time
-	Lastmod     time.Time
-	PublishDate time.Time
-	ExpiryDate  time.Time
-}
diff --git a/resources/page/pages.go b/resources/page/pages.go
new file mode 100644
index 00000000000..1f79932a9cb
--- /dev/null
+++ b/resources/page/pages.go
@@ -0,0 +1,115 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"math/rand"
+
+	"github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+	_ resource.ResourcesConverter = Pages{}
+)
+
+// Pages is a slice of pages. This is the most common list type in Hugo.
+type Pages []Page
+
+func (ps Pages) String() string {
+	return fmt.Sprintf("Pages(%d)", len(ps))
+}
+
+// Used in tests.
+func (ps Pages) shuffle() {
+	for i := range ps {
+		j := rand.Intn(i + 1)
+		ps[i], ps[j] = ps[j], ps[i]
+	}
+}
+
+// ToResources wraps resource.ResourcesConverter
+func (pages Pages) ToResources() resource.Resources {
+	r := make(resource.Resources, len(pages))
+	for i, p := range pages {
+		r[i] = p
+	}
+	return r
+}
+
+// ToPages tries to convert seq into Pages.
+func ToPages(seq interface{}) (Pages, error) {
+	if seq == nil {
+		return Pages{}, nil
+	}
+
+	switch v := seq.(type) {
+	case Pages:
+		return v, nil
+	case *Pages:
+		return *(v), nil
+	case WeightedPages:
+		return v.Pages(), nil
+	case PageGroup:
+		return v.Pages, nil
+	case []interface{}:
+		pages := make(Pages, len(v))
+		success := true
+		for i, vv := range v {
+			p, ok := vv.(Page)
+			if !ok {
+				success = false
+				break
+			}
+			pages[i] = p
+		}
+		if success {
+			return pages, nil
+		}
+	}
+
+	return nil, fmt.Errorf("cannot convert type %T to Pages", seq)
+}
+
+func (p Pages) Group(key interface{}, in interface{}) (interface{}, error) {
+	pages, err := ToPages(in)
+	if err != nil {
+		return nil, err
+	}
+	return PageGroup{Key: key, Pages: pages}, nil
+}
+
+// Len returns the number of pages in the list.
+func (p Pages) Len() int {
+	return len(p)
+}
+
+func (ps Pages) removeFirstIfFound(p Page) Pages {
+	ii := -1
+	for i, pp := range ps {
+		if p.Eq(pp) {
+			ii = i
+			break
+		}
+	}
+
+	if ii != -1 {
+		ps = append(ps[:ii], ps[ii+1:]...)
+	}
+	return ps
+}
+
+// PagesFactory is a function that provides a Pages collection on demand.
+// We do a lot of lazy Pages initialization in Hugo, so we need a type.
+type PagesFactory func() Pages
diff --git a/hugolib/pageCache.go b/resources/page/pages_cache.go
similarity index 97%
rename from hugolib/pageCache.go
rename to resources/page/pages_cache.go
index 485da4ba3e4..e82d9a8cf98 100644
--- a/hugolib/pageCache.go
+++ b/resources/page/pages_cache.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"sync"
diff --git a/hugolib/pageCache_test.go b/resources/page/pages_cache_test.go
similarity index 85%
rename from hugolib/pageCache_test.go
rename to resources/page/pages_cache_test.go
index 48f595f8690..b8328340878 100644
--- a/hugolib/pageCache_test.go
+++ b/resources/page/pages_cache_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"strconv"
@@ -27,7 +27,7 @@ func TestPageCache(t *testing.T) {
 	c1 := newPageCache()
 
 	changeFirst := func(p Pages) {
-		p[0].Description = "changed"
+		p[0].(*testPage).description = "changed"
 	}
 
 	var o1 uint64
@@ -40,10 +40,8 @@ func TestPageCache(t *testing.T) {
 
 	var testPageSets []Pages
 
-	s := newTestSite(t)
-
 	for i := 0; i < 50; i++ {
-		testPageSets = append(testPageSets, createSortTestPages(s, i+1))
+		testPageSets = append(testPageSets, createSortTestPages(i+1))
 	}
 
 	for j := 0; j < 100; j++ {
@@ -66,7 +64,7 @@ func TestPageCache(t *testing.T) {
 				assert.Equal(t, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)), c3)
 				l2.Unlock()
 				assert.NotNil(t, p3)
-				assert.Equal(t, p3[0].Description, "changed")
+				assert.Equal(t, p3[0].(*testPage).description, "changed")
 			}
 		}()
 	}
@@ -77,7 +75,7 @@ func BenchmarkPageCache(b *testing.B) {
 	cache := newPageCache()
 	pages := make(Pages, 30)
 	for i := 0; i < 30; i++ {
-		pages[i] = &Page{title: "p" + strconv.Itoa(i)}
+		pages[i] = &testPage{title: "p" + strconv.Itoa(i)}
 	}
 	key := "key"
 
diff --git a/hugolib/pages_language_merge.go b/resources/page/pages_language_merge.go
similarity index 94%
rename from hugolib/pages_language_merge.go
rename to resources/page/pages_language_merge.go
index 8bbae9a1271..11393a75404 100644
--- a/hugolib/pages_language_merge.go
+++ b/resources/page/pages_language_merge.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"fmt"
@@ -42,7 +42,7 @@ func (p1 Pages) MergeByLanguage(p2 Pages) Pages {
 			}
 		}
 
-		pages.sort()
+		SortByDefault(*pages)
 	}
 
 	out, _ := spc.getP("pages.MergeByLanguage", merge, p1, p2)
diff --git a/hugolib/pagesPrevNext.go b/resources/page/pages_prev_next.go
similarity index 77%
rename from hugolib/pagesPrevNext.go
rename to resources/page/pages_prev_next.go
index 947a49b8581..9293c98746d 100644
--- a/hugolib/pagesPrevNext.go
+++ b/resources/page/pages_prev_next.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,10 +11,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
-// Prev returns the previous page reletive to the given page.
-func (p Pages) Prev(cur *Page) *Page {
+// Prev returns the previous page relative to the given page.
+func (p Pages) Prev(cur Page) Page {
 	for x, c := range p {
 		if c.Eq(cur) {
 			if x == 0 {
@@ -27,8 +27,8 @@ func (p Pages) Prev(cur *Page) *Page {
 	return nil
 }
 
-// Next returns the next page reletive to the given page.
-func (p Pages) Next(cur *Page) *Page {
+// Next returns the next page relative to the given page.
+func (p Pages) Next(cur Page) Page {
 	for x, c := range p {
 		if c.Eq(cur) {
 			if x < len(p)-1 {
diff --git a/hugolib/pagesPrevNext_test.go b/resources/page/pages_prev_next_test.go
similarity index 85%
rename from hugolib/pagesPrevNext_test.go
rename to resources/page/pages_prev_next_test.go
index 5945d8fe50b..c39ad060367 100644
--- a/hugolib/pagesPrevNext_test.go
+++ b/resources/page/pages_prev_next_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"testing"
@@ -51,18 +51,15 @@ func TestNext(t *testing.T) {
 }
 
 func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages {
-	s := newTestSite(t)
 	w := WeightedPages{}
 
 	for _, src := range pagePNTestSources {
-		p, err := s.NewPage(src.path)
-		if err != nil {
-			t.Fatalf("failed to prepare test page %s", src.path)
-		}
-		p.Weight = src.weight
-		p.Date = cast.ToTime(src.date)
-		p.PublishDate = cast.ToTime(src.date)
-		w = append(w, WeightedPage{p.Weight, p})
+		p := newTestPage()
+		p.path = src.path
+		p.weight = src.weight
+		p.date = cast.ToTime(src.date)
+		p.pubDate = cast.ToTime(src.date)
+		w = append(w, WeightedPage{Weight: p.weight, Page: p})
 	}
 
 	w.Sort()
diff --git a/hugolib/pages_related.go b/resources/page/pages_related.go
similarity index 81%
rename from hugolib/pages_related.go
rename to resources/page/pages_related.go
index 2881a45e6e3..1a4386135d0 100644
--- a/hugolib/pages_related.go
+++ b/resources/page/pages_related.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,13 +11,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"sync"
 
 	"github.com/gohugoio/hugo/common/types"
 	"github.com/gohugoio/hugo/related"
+	"github.com/pkg/errors"
 	"github.com/spf13/cast"
 )
 
@@ -28,7 +29,7 @@ var (
 )
 
 // A PageGenealogist finds related pages in a page collection. This interface is implemented
-// by Pages and PageGroup, which makes it available as `{{ .RegularPages.Related . }}` etc.
+// by Pages and PageGroup, which makes it available as `{{ .RegularPages.Related . }}` etc.
 type PageGenealogist interface {
 
 	// Template example:
@@ -47,27 +48,22 @@ type PageGenealogist interface {
 // Related searches all the configured indices with the search keywords from the
 // supplied document.
 func (p Pages) Related(doc related.Document) (Pages, error) {
-	page, err := unwrapPage(doc)
+	result, err := p.searchDoc(doc)
 	if err != nil {
 		return nil, err
 	}
 
-	result, err := p.searchDoc(page)
-	if err != nil {
-		return nil, err
+	if page, ok := doc.(Page); ok {
+		return result.removeFirstIfFound(page), nil
 	}
 
-	return result.removeFirstIfFound(page), nil
+	return result, nil
+
 }
 
 // RelatedIndices searches the given indices with the search keywords from the
 // supplied document.
 func (p Pages) RelatedIndices(doc related.Document, indices ...interface{}) (Pages, error) {
-	page, err := unwrapPage(doc)
-	if err != nil {
-		return nil, err
-	}
-
 	indicesStr, err := cast.ToStringSliceE(indices)
 	if err != nil {
 		return nil, err
@@ -78,7 +74,11 @@ func (p Pages) RelatedIndices(doc related.Document, indices ...interface{}) (Pag
 		return nil, err
 	}
 
-	return result.removeFirstIfFound(page), nil
+	if page, ok := doc.(Page); ok {
+		return result.removeFirstIfFound(page), nil
+	}
+
+	return result, nil
 
 }
 
@@ -110,7 +110,12 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela
 		return nil, nil
 	}
 
-	cache := p[0].s.relatedDocsHandler
+	d, ok := p[0].(InternalDependencies)
+	if !ok {
+		return nil, errors.Errorf("invalid type %T in related search", p[0])
+	}
+
+	cache := d.GetRelatedDocsHandler()
 
 	searchIndex, err := cache.getOrCreateIndex(p)
 	if err != nil {
@@ -125,7 +130,7 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela
 	if len(result) > 0 {
 		mp := make(Pages, len(result))
 		for i, match := range result {
-			mp[i] = match.(*Page)
+			mp[i] = match.(Page)
 		}
 		return mp, nil
 	}
@@ -139,20 +144,23 @@ type cachedPostingList struct {
 	postingList *related.InvertedIndex
 }
 
-type relatedDocsHandler struct {
-	// This is configured in site or langugage config.
+type RelatedDocsHandler struct {
 	cfg related.Config
 
 	postingLists []*cachedPostingList
 	mu           sync.RWMutex
 }
 
-func newSearchIndexHandler(cfg related.Config) *relatedDocsHandler {
-	return &relatedDocsHandler{cfg: cfg}
+func NewRelatedDocsHandler(cfg related.Config) *RelatedDocsHandler {
+	return &RelatedDocsHandler{cfg: cfg}
+}
+
+func (s *RelatedDocsHandler) Clone() *RelatedDocsHandler {
+	return NewRelatedDocsHandler(s.cfg)
 }
 
 // This assumes that a lock has been acquired.
-func (s *relatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
+func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
 	for _, ci := range s.postingLists {
 		if pagesEqual(p, ci.p) {
 			return ci.postingList
@@ -161,7 +169,7 @@ func (s *relatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
 	return nil
 }
 
-func (s *relatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex, error) {
+func (s *RelatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex, error) {
 	s.mu.RLock()
 	cachedIndex := s.getIndex(p)
 	if cachedIndex != nil {
diff --git a/resources/page/pages_related_test.go b/resources/page/pages_related_test.go
new file mode 100644
index 00000000000..016b492c865
--- /dev/null
+++ b/resources/page/pages_related_test.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"testing"
+	"time"
+
+	"github.com/gohugoio/hugo/common/types"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestRelated(t *testing.T) {
+	assert := require.New(t)
+
+	t.Parallel()
+
+	pages := Pages{
+		&testPage{
+			title:   "Page 1",
+			pubDate: mustParseDate("2017-01-03"),
+			params: map[string]interface{}{
+				"keywords": []string{"hugo", "says"},
+			},
+		},
+		&testPage{
+			title:   "Page 2",
+			pubDate: mustParseDate("2017-01-02"),
+			params: map[string]interface{}{
+				"keywords": []string{"hugo", "rocks"},
+			},
+		},
+		&testPage{
+			title:   "Page 3",
+			pubDate: mustParseDate("2017-01-01"),
+			params: map[string]interface{}{
+				"keywords": []string{"bep", "says"},
+			},
+		},
+	}
+
+	result, err := pages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
+
+	assert.NoError(err)
+	assert.Len(result, 2)
+	assert.Equal("Page 2", result[0].Title())
+	assert.Equal("Page 1", result[1].Title())
+
+	result, err = pages.Related(pages[0])
+	assert.NoError(err)
+	assert.Len(result, 2)
+	assert.Equal("Page 2", result[0].Title())
+	assert.Equal("Page 3", result[1].Title())
+
+	result, err = pages.RelatedIndices(pages[0], "keywords")
+	assert.NoError(err)
+	assert.Len(result, 2)
+	assert.Equal("Page 2", result[0].Title())
+	assert.Equal("Page 3", result[1].Title())
+
+	result, err = pages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
+	assert.NoError(err)
+	assert.Len(result, 2)
+	assert.Equal("Page 2", result[0].Title())
+	assert.Equal("Page 3", result[1].Title())
+}
+
+func mustParseDate(s string) time.Time {
+	d, err := time.Parse("2006-01-02", s)
+	if err != nil {
+		panic(err)
+	}
+	return d
+}
diff --git a/hugolib/pageSort.go b/resources/page/pages_sort.go
similarity index 77%
rename from hugolib/pageSort.go
rename to resources/page/pages_sort.go
index 454beb473e9..eb3a28247f9 100644
--- a/hugolib/pageSort.go
+++ b/resources/page/pages_sort.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,13 +11,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
-	"github.com/gohugoio/hugo/helpers"
-
 	"sort"
 
+	"github.com/gohugoio/hugo/resources/resource"
+
 	"github.com/spf13/cast"
 )
 
@@ -34,7 +34,7 @@ type pageSorter struct {
 }
 
 // pageBy is a closure used in the Sort.Less method.
-type pageBy func(p1, p2 *Page) bool
+type pageBy func(p1, p2 Page) bool
 
 // Sort stable sorts the pages given the receiver's sort order.
 func (by pageBy) Sort(pages Pages) {
@@ -45,39 +45,45 @@ func (by pageBy) Sort(pages Pages) {
 	sort.Stable(ps)
 }
 
-// defaultPageSort is the default sort for pages in Hugo:
+// DefaultPageSort is the default sort func for pages in Hugo:
 // Order by Weight, Date, LinkTitle and then full file path.
-var defaultPageSort = func(p1, p2 *Page) bool {
-	if p1.Weight == p2.Weight {
-		if p1.Date.Unix() == p2.Date.Unix() {
+var DefaultPageSort = func(p1, p2 Page) bool {
+	if p1.Weight() == p2.Weight() {
+		if p1.Date().Unix() == p2.Date().Unix() {
 			if p1.LinkTitle() == p2.LinkTitle() {
-				return (p1.FullFilePath() < p2.FullFilePath())
+				if p1.File() == nil || p2.File() == nil {
+					return p1.File() == nil
+				}
+				return p1.File().Filename() < p2.File().Filename()
 			}
 			return (p1.LinkTitle() < p2.LinkTitle())
 		}
-		return p1.Date.Unix() > p2.Date.Unix()
+		return p1.Date().Unix() > p2.Date().Unix()
 	}
 
-	if p2.Weight == 0 {
+	if p2.Weight() == 0 {
 		return true
 	}
 
-	if p1.Weight == 0 {
+	if p1.Weight() == 0 {
 		return false
 	}
 
-	return p1.Weight < p2.Weight
+	return p1.Weight() < p2.Weight()
 }
 
-var languagePageSort = func(p1, p2 *Page) bool {
+var languagePageSort = func(p1, p2 Page) bool {
+
 	if p1.Language().Weight == p2.Language().Weight {
-		if p1.Date.Unix() == p2.Date.Unix() {
+		if p1.Date().Unix() == p2.Date().Unix() {
 			if p1.LinkTitle() == p2.LinkTitle() {
-				return (p1.FullFilePath() < p2.FullFilePath())
+				if p1.File() != nil && p2.File() != nil {
+					return p1.File().Filename() < p2.File().Filename()
+				}
 			}
 			return (p1.LinkTitle() < p2.LinkTitle())
 		}
-		return p1.Date.Unix() > p2.Date.Unix()
+		return p1.Date().Unix() > p2.Date().Unix()
 	}
 
 	if p2.Language().Weight == 0 {
@@ -97,18 +103,6 @@ func (ps *pageSorter) Swap(i, j int) { ps.pages[i], ps.pages[j] = ps.pages[j], p
 // Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
 func (ps *pageSorter) Less(i, j int) bool { return ps.by(ps.pages[i], ps.pages[j]) }
 
-// Sort sorts the pages by the default sort order defined:
-// Order by Weight, Date, LinkTitle and then full file path.
-func (p Pages) Sort() {
-	// Remove in Hugo 0.51
-	helpers.Deprecated("Pages", "Sort", "Use .ByWeight", true)
-	p.sort()
-}
-
-func (p Pages) sort() {
-	pageBy(defaultPageSort).Sort(p)
-}
-
 // Limit limits the number of pages returned to n.
 func (p Pages) Limit(n int) Pages {
 	if len(p) > n {
@@ -124,10 +118,15 @@ func (p Pages) Limit(n int) Pages {
 // This may safely be executed  in parallel.
 func (p Pages) ByWeight() Pages {
 	const key = "pageSort.ByWeight"
-	pages, _ := spc.get(key, pageBy(defaultPageSort).Sort, p)
+	pages, _ := spc.get(key, pageBy(DefaultPageSort).Sort, p)
 	return pages
 }
 
+// SortByDefault sorts pages by the default sort.
+func SortByDefault(pages Pages) {
+	pageBy(DefaultPageSort).Sort(pages)
+}
+
 // ByTitle sorts the Pages by title and returns a copy.
 //
 // Adjacent invocations on the same receiver will return a cached result.
@@ -137,8 +136,8 @@ func (p Pages) ByTitle() Pages {
 
 	const key = "pageSort.ByTitle"
 
-	title := func(p1, p2 *Page) bool {
-		return p1.title < p2.title
+	title := func(p1, p2 Page) bool {
+		return p1.Title() < p2.Title()
 	}
 
 	pages, _ := spc.get(key, pageBy(title).Sort, p)
@@ -154,7 +153,7 @@ func (p Pages) ByLinkTitle() Pages {
 
 	const key = "pageSort.ByLinkTitle"
 
-	linkTitle := func(p1, p2 *Page) bool {
+	linkTitle := func(p1, p2 Page) bool {
 		return p1.LinkTitle() < p2.LinkTitle()
 	}
 
@@ -172,8 +171,8 @@ func (p Pages) ByDate() Pages {
 
 	const key = "pageSort.ByDate"
 
-	date := func(p1, p2 *Page) bool {
-		return p1.Date.Unix() < p2.Date.Unix()
+	date := func(p1, p2 Page) bool {
+		return p1.Date().Unix() < p2.Date().Unix()
 	}
 
 	pages, _ := spc.get(key, pageBy(date).Sort, p)
@@ -190,8 +189,8 @@ func (p Pages) ByPublishDate() Pages {
 
 	const key = "pageSort.ByPublishDate"
 
-	pubDate := func(p1, p2 *Page) bool {
-		return p1.PublishDate.Unix() < p2.PublishDate.Unix()
+	pubDate := func(p1, p2 Page) bool {
+		return p1.PublishDate().Unix() < p2.PublishDate().Unix()
 	}
 
 	pages, _ := spc.get(key, pageBy(pubDate).Sort, p)
@@ -208,8 +207,8 @@ func (p Pages) ByExpiryDate() Pages {
 
 	const key = "pageSort.ByExpiryDate"
 
-	expDate := func(p1, p2 *Page) bool {
-		return p1.ExpiryDate.Unix() < p2.ExpiryDate.Unix()
+	expDate := func(p1, p2 Page) bool {
+		return p1.ExpiryDate().Unix() < p2.ExpiryDate().Unix()
 	}
 
 	pages, _ := spc.get(key, pageBy(expDate).Sort, p)
@@ -226,8 +225,8 @@ func (p Pages) ByLastmod() Pages {
 
 	const key = "pageSort.ByLastmod"
 
-	date := func(p1, p2 *Page) bool {
-		return p1.Lastmod.Unix() < p2.Lastmod.Unix()
+	date := func(p1, p2 Page) bool {
+		return p1.Lastmod().Unix() < p2.Lastmod().Unix()
 	}
 
 	pages, _ := spc.get(key, pageBy(date).Sort, p)
@@ -244,8 +243,20 @@ func (p Pages) ByLength() Pages {
 
 	const key = "pageSort.ByLength"
 
-	length := func(p1, p2 *Page) bool {
-		return len(p1.content()) < len(p2.content())
+	length := func(p1, p2 Page) bool {
+
+		p1l, ok1 := p1.(resource.LengthProvider)
+		p2l, ok2 := p2.(resource.LengthProvider)
+
+		if !ok1 {
+			return true
+		}
+
+		if !ok2 {
+			return false
+		}
+
+		return p1l.Len() < p2l.Len()
 	}
 
 	pages, _ := spc.get(key, pageBy(length).Sort, p)
@@ -267,6 +278,11 @@ func (p Pages) ByLanguage() Pages {
 	return pages
 }
 
+// SortByLanguage sorts the pages by language.
+func SortByLanguage(pages Pages) {
+	pageBy(languagePageSort).Sort(pages)
+}
+
 // Reverse reverses the order in Pages and returns a copy.
 //
 // Adjacent invocations on the same receiver will return a cached result.
@@ -295,7 +311,7 @@ func (p Pages) ByParam(paramsKey interface{}) Pages {
 	paramsKeyStr := cast.ToString(paramsKey)
 	key := "pageSort.ByParam." + paramsKeyStr
 
-	paramsKeyComparator := func(p1, p2 *Page) bool {
+	paramsKeyComparator := func(p1, p2 Page) bool {
 		v1, _ := p1.Param(paramsKeyStr)
 		v2, _ := p2.Param(paramsKeyStr)
 
diff --git a/hugolib/pageSort_test.go b/resources/page/pages_sort_test.go
similarity index 72%
rename from hugolib/pageSort_test.go
rename to resources/page/pages_sort_test.go
index 915947fd3af..c781de2f335 100644
--- a/hugolib/pageSort_test.go
+++ b/resources/page/pages_sort_test.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,14 +11,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"fmt"
-	"path/filepath"
 	"testing"
 	"time"
 
+	"github.com/gohugoio/hugo/resources/resource"
+
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -30,30 +31,28 @@ func TestDefaultSort(t *testing.T) {
 	d3 := d1.Add(-2 * time.Hour)
 	d4 := d1.Add(-3 * time.Hour)
 
-	s := newTestSite(t)
-
-	p := createSortTestPages(s, 4)
+	p := createSortTestPages(4)
 
 	// first by weight
 	setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p)
-	p.sort()
+	SortByDefault(p)
 
-	assert.Equal(t, 1, p[0].Weight)
+	assert.Equal(t, 1, p[0].Weight())
 
 	// Consider zero weight, issue #2673
 	setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p)
-	p.sort()
+	SortByDefault(p)
 
-	assert.Equal(t, 1, p[0].Weight)
+	assert.Equal(t, 1, p[0].Weight())
 
 	// next by date
 	setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p)
-	p.sort()
-	assert.Equal(t, d1, p[0].Date)
+	SortByDefault(p)
+	assert.Equal(t, d1, p[0].Date())
 
 	// finally by link title
 	setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p)
-	p.sort()
+	SortByDefault(p)
 	assert.Equal(t, "al", p[0].LinkTitle())
 	assert.Equal(t, "bl", p[1].LinkTitle())
 	assert.Equal(t, "cl", p[2].LinkTitle())
@@ -63,17 +62,18 @@ func TestDefaultSort(t *testing.T) {
 func TestSortByLinkTitle(t *testing.T) {
 	t.Parallel()
 	assert := require.New(t)
-	s := newTestSite(t)
-	pages := createSortTestPages(s, 6)
+	pages := createSortTestPages(6)
 
 	for i, p := range pages {
+		pp := p.(*testPage)
 		if i < 5 {
-			p.title = fmt.Sprintf("title%d", i)
+			pp.title = fmt.Sprintf("title%d", i)
 		}
 
 		if i > 2 {
-			p.linkTitle = fmt.Sprintf("linkTitle%d", i)
+			pp.linkTitle = fmt.Sprintf("linkTitle%d", i)
 		}
+
 	}
 
 	pages.shuffle()
@@ -92,26 +92,25 @@ func TestSortByLinkTitle(t *testing.T) {
 
 func TestSortByN(t *testing.T) {
 	t.Parallel()
-	s := newTestSite(t)
 	d1 := time.Now()
 	d2 := d1.Add(-2 * time.Hour)
 	d3 := d1.Add(-10 * time.Hour)
 	d4 := d1.Add(-20 * time.Hour)
 
-	p := createSortTestPages(s, 4)
+	p := createSortTestPages(4)
 
 	for i, this := range []struct {
 		sortFunc   func(p Pages) Pages
 		assertFunc func(p Pages) bool
 	}{
-		{(Pages).ByWeight, func(p Pages) bool { return p[0].Weight == 1 }},
-		{(Pages).ByTitle, func(p Pages) bool { return p[0].title == "ab" }},
+		{(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }},
+		{(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }},
 		{(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }},
-		{(Pages).ByDate, func(p Pages) bool { return p[0].Date == d4 }},
-		{(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate == d4 }},
-		{(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate == d4 }},
-		{(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod == d3 }},
-		{(Pages).ByLength, func(p Pages) bool { return p[0].content() == "b_content" }},
+		{(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }},
+		{(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }},
+		{(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }},
+		{(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }},
+		{(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len(p[0].(*testPage).content) }},
 	} {
 		setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p)
 
@@ -125,8 +124,7 @@ func TestSortByN(t *testing.T) {
 
 func TestLimit(t *testing.T) {
 	t.Parallel()
-	s := newTestSite(t)
-	p := createSortTestPages(s, 10)
+	p := createSortTestPages(10)
 	firstFive := p.Limit(5)
 	assert.Equal(t, 5, len(firstFive))
 	for i := 0; i < 5; i++ {
@@ -138,13 +136,12 @@ func TestLimit(t *testing.T) {
 
 func TestPageSortReverse(t *testing.T) {
 	t.Parallel()
-	s := newTestSite(t)
-	p1 := createSortTestPages(s, 10)
-	assert.Equal(t, 0, p1[0].fuzzyWordCount)
-	assert.Equal(t, 9, p1[9].fuzzyWordCount)
+	p1 := createSortTestPages(10)
+	assert.Equal(t, 0, p1[0].(*testPage).fuzzyWordCount)
+	assert.Equal(t, 9, p1[9].(*testPage).fuzzyWordCount)
 	p2 := p1.Reverse()
-	assert.Equal(t, 9, p2[0].fuzzyWordCount)
-	assert.Equal(t, 0, p2[9].fuzzyWordCount)
+	assert.Equal(t, 9, p2[0].(*testPage).fuzzyWordCount)
+	assert.Equal(t, 0, p2[9].(*testPage).fuzzyWordCount)
 	// cached
 	assert.True(t, pagesEqual(p2, p1.Reverse()))
 }
@@ -152,10 +149,9 @@ func TestPageSortReverse(t *testing.T) {
 func TestPageSortByParam(t *testing.T) {
 	t.Parallel()
 	var k interface{} = "arbitrarily.nested"
-	s := newTestSite(t)
 
-	unsorted := createSortTestPages(s, 10)
-	delete(unsorted[9].params, "arbitrarily")
+	unsorted := createSortTestPages(10)
+	delete(unsorted[9].Params(), "arbitrarily")
 
 	firstSetValue, _ := unsorted[0].Param(k)
 	secondSetValue, _ := unsorted[1].Param(k)
@@ -182,23 +178,22 @@ func TestPageSortByParam(t *testing.T) {
 func TestPageSortByParamNumeric(t *testing.T) {
 	t.Parallel()
 	var k interface{} = "arbitrarily.nested"
-	s := newTestSite(t)
 
 	n := 10
-	unsorted := createSortTestPages(s, n)
+	unsorted := createSortTestPages(n)
 	for i := 0; i < n; i++ {
 		v := 100 - i
 		if i%2 == 0 {
 			v = 100.0 - i
 		}
 
-		unsorted[i].params = map[string]interface{}{
+		unsorted[i].(*testPage).params = map[string]interface{}{
 			"arbitrarily": map[string]interface{}{
 				"nested": v,
 			},
 		}
 	}
-	delete(unsorted[9].params, "arbitrarily")
+	delete(unsorted[9].Params(), "arbitrarily")
 
 	firstSetValue, _ := unsorted[0].Param(k)
 	secondSetValue, _ := unsorted[1].Param(k)
@@ -223,8 +218,7 @@ func TestPageSortByParamNumeric(t *testing.T) {
 }
 
 func BenchmarkSortByWeightAndReverse(b *testing.B) {
-	s := newTestSite(b)
-	p := createSortTestPages(s, 300)
+	p := createSortTestPages(300)
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
@@ -234,31 +228,35 @@ func BenchmarkSortByWeightAndReverse(b *testing.B) {
 
 func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) {
 	for i := range dates {
-		pages[i].Date = dates[i]
-		pages[i].Lastmod = dates[i]
-		pages[i].Weight = weights[i]
-		pages[i].title = titles[i]
+		this := pages[i].(*testPage)
+		other := pages[len(dates)-1-i].(*testPage)
+
+		this.date = dates[i]
+		this.lastMod = dates[i]
+		this.weight = weights[i]
+		this.title = titles[i]
 		// make sure we compare apples and ... apples ...
-		pages[len(dates)-1-i].linkTitle = pages[i].title + "l"
-		pages[len(dates)-1-i].PublishDate = dates[i]
-		pages[len(dates)-1-i].ExpiryDate = dates[i]
-		pages[len(dates)-1-i].workContent = []byte(titles[i] + "_content")
+		other.linkTitle = this.Title() + "l"
+		other.pubDate = dates[i]
+		other.expiryDate = dates[i]
+		other.content = titles[i] + "_content"
 	}
-	lastLastMod := pages[2].Lastmod
-	pages[2].Lastmod = pages[1].Lastmod
-	pages[1].Lastmod = lastLastMod
+	lastLastMod := pages[2].Lastmod()
+	pages[2].(*testPage).lastMod = pages[1].Lastmod()
+	pages[1].(*testPage).lastMod = lastLastMod
 
 	for _, p := range pages {
-		p.resetContent()
+		p.(*testPage).content = ""
 	}
 
 }
 
-func createSortTestPages(s *Site, num int) Pages {
+func createSortTestPages(num int) Pages {
 	pages := make(Pages, num)
 
 	for i := 0; i < num; i++ {
-		p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))
+		p := newTestPage()
+		p.path = fmt.Sprintf("/x/y/p%d.md", i)
 		p.params = map[string]interface{}{
 			"arbitrarily": map[string]interface{}{
 				"nested": ("xyz" + fmt.Sprintf("%v", 100-i)),
@@ -271,8 +269,8 @@ func createSortTestPages(s *Site, num int) Pages {
 			w = 10
 		}
 		p.fuzzyWordCount = i
-		p.Weight = w
-		p.Description = "initial"
+		p.weight = w
+		p.description = "initial"
 
 		pages[i] = p
 	}
diff --git a/hugolib/pagination.go b/resources/page/pagination.go
similarity index 56%
rename from hugolib/pagination.go
rename to resources/page/pagination.go
index 05846a6bb35..6d5da966ecf 100644
--- a/hugolib/pagination.go
+++ b/resources/page/pagination.go
@@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package hugolib
+package page
 
 import (
 	"errors"
@@ -19,18 +19,23 @@ import (
 	"html/template"
 	"math"
 	"reflect"
-	"strings"
 
 	"github.com/gohugoio/hugo/config"
 
 	"github.com/spf13/cast"
 )
 
+// PaginatorProvider provides two ways to create a page paginator.
+type PaginatorProvider interface {
+	Paginator(options ...interface{}) (*Pager, error)
+	Paginate(seq interface{}, options ...interface{}) (*Pager, error)
+}
+
 // Pager represents one of the elements in a paginator.
 // The number, starting on 1, represents its place.
 type Pager struct {
 	number int
-	*paginator
+	*Paginator
 }
 
 func (p Pager) String() string {
@@ -41,20 +46,6 @@ type paginatedElement interface {
 	Len() int
 }
 
-// Len returns the number of pages in the list.
-func (p Pages) Len() int {
-	return len(p)
-}
-
-// Len returns the number of pages in the page group.
-func (psg PagesGroup) Len() int {
-	l := 0
-	for _, pg := range psg {
-		l += len(pg.Pages)
-	}
-	return l
-}
-
 type pagers []*Pager
 
 var (
@@ -62,14 +53,12 @@ var (
 	paginatorEmptyPageGroups PagesGroup
 )
 
-type paginator struct {
+type Paginator struct {
 	paginatedElements []paginatedElement
 	pagers
 	paginationURLFactory
-	total   int
-	size    int
-	source  interface{}
-	options []interface{}
+	total int
+	size  int
 }
 
 type paginationURLFactory func(int) string
@@ -120,7 +109,7 @@ func (p *Pager) element() paginatedElement {
 }
 
 // page returns the Page with the given index
-func (p *Pager) page(index int) (*Page, error) {
+func (p *Pager) page(index int) (Page, error) {
 
 	if pages, ok := p.element().(Pages); ok {
 		if pages != nil && len(pages) > index {
@@ -188,22 +177,22 @@ func (p *Pager) Last() *Pager {
 }
 
 // Pagers returns a list of pagers that can be used to build a pagination menu.
-func (p *paginator) Pagers() pagers {
+func (p *Paginator) Pagers() pagers {
 	return p.pagers
 }
 
 // PageSize returns the size of each paginator page.
-func (p *paginator) PageSize() int {
+func (p *Paginator) PageSize() int {
 	return p.size
 }
 
 // TotalPages returns the number of pages in the paginator.
-func (p *paginator) TotalPages() int {
+func (p *Paginator) TotalPages() int {
 	return len(p.paginatedElements)
 }
 
 // TotalNumberOfElements returns the number of elements on all pages in this paginator.
-func (p *paginator) TotalNumberOfElements() int {
+func (p *Paginator) TotalNumberOfElements() int {
 	return p.total
 }
 
@@ -221,7 +210,7 @@ func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
 
 	type keyPage struct {
 		key  interface{}
-		page *Page
+		page Page
 	}
 
 	var (
@@ -261,117 +250,7 @@ func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
 	return split
 }
 
-// Paginator get this Page's main output's paginator.
-func (p *Page) Paginator(options ...interface{}) (*Pager, error) {
-	return p.mainPageOutput.Paginator(options...)
-}
-
-// Paginator gets this PageOutput's paginator if it's already created.
-// If it's not, one will be created with all pages in Data["Pages"].
-func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) {
-	if !p.IsNode() {
-		return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title)
-	}
-	pagerSize, err := resolvePagerSize(p.s.Cfg, options...)
-
-	if err != nil {
-		return nil, err
-	}
-
-	var initError error
-
-	p.paginatorInit.Do(func() {
-		if p.paginator != nil {
-			return
-		}
-
-		pathDescriptor := p.targetPathDescriptor
-		if p.s.owner.IsMultihost() {
-			pathDescriptor.LangPrefix = ""
-		}
-		pagers, err := paginatePages(pathDescriptor, p.data["Pages"], pagerSize)
-
-		if err != nil {
-			initError = err
-		}
-
-		if len(pagers) > 0 {
-			// the rest of the nodes will be created later
-			p.paginator = pagers[0]
-			p.paginator.source = "paginator"
-			p.paginator.options = options
-		}
-
-	})
-
-	if initError != nil {
-		return nil, initError
-	}
-
-	return p.paginator, nil
-}
-
-// Paginate invokes this Page's main output's Paginate method.
-func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
-	return p.mainPageOutput.Paginate(seq, options...)
-}
-
-// Paginate gets this PageOutput's paginator if it's already created.
-// If it's not, one will be created with the qiven sequence.
-// Note that repeated calls will return the same result, even if the sequence is different.
-func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
-	if !p.IsNode() {
-		return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title)
-	}
-
-	pagerSize, err := resolvePagerSize(p.s.Cfg, options...)
-
-	if err != nil {
-		return nil, err
-	}
-
-	var initError error
-
-	p.paginatorInit.Do(func() {
-		if p.paginator != nil {
-			return
-		}
-
-		pathDescriptor := p.targetPathDescriptor
-		if p.s.owner.IsMultihost() {
-			pathDescriptor.LangPrefix = ""
-		}
-		pagers, err := paginatePages(pathDescriptor, seq, pagerSize)
-
-		if err != nil {
-			initError = err
-		}
-
-		if len(pagers) > 0 {
-			// the rest of the nodes will be created later
-			p.paginator = pagers[0]
-			p.paginator.source = seq
-			p.paginator.options = options
-		}
-
-	})
-
-	if initError != nil {
-		return nil, initError
-	}
-
-	if p.paginator.source == "paginator" {
-		return nil, errors.New("a Paginator was previously built for this Node without filters; look for earlier .Paginator usage")
-	}
-
-	if !reflect.DeepEqual(options, p.paginator.options) || !probablyEqualPageLists(p.paginator.source, seq) {
-		return nil, errors.New("invoked multiple times with different arguments")
-	}
-
-	return p.paginator, nil
-}
-
-func resolvePagerSize(cfg config.Provider, options ...interface{}) (int, error) {
+func ResolvePagerSize(cfg config.Provider, options ...interface{}) (int, error) {
 	if len(options) == 0 {
 		return cfg.GetInt("paginate"), nil
 	}
@@ -389,7 +268,7 @@ func resolvePagerSize(cfg config.Provider, options ...interface{}) (int, error)
 	return pas, nil
 }
 
-func paginatePages(td targetPathDescriptor, seq interface{}, pagerSize int) (pagers, error) {
+func Paginate(td TargetPathDescriptor, seq interface{}, pagerSize int) (*Paginator, error) {
 
 	if pagerSize <= 0 {
 		return nil, errors.New("'paginate' configuration setting must be positive to paginate")
@@ -397,90 +276,23 @@ func paginatePages(td targetPathDescriptor, seq interface{}, pagerSize int) (pag
 
 	urlFactory := newPaginationURLFactory(td)
 
-	var paginator *paginator
+	var paginator *Paginator
 
-	groups, err := toPagesGroup(seq)
+	groups, err := ToPagesGroup(seq)
 	if err != nil {
 		return nil, err
 	}
 	if groups != nil {
 		paginator, _ = newPaginatorFromPageGroups(groups, pagerSize, urlFactory)
 	} else {
-		pages, err := toPages(seq)
+		pages, err := ToPages(seq)
 		if err != nil {
 			return nil, err
 		}
 		paginator, _ = newPaginatorFromPages(pages, pagerSize, urlFactory)
 	}
 
-	pagers := paginator.Pagers()
-
-	return pagers, nil
-}
-
-func toPagesGroup(seq interface{}) (PagesGroup, error) {
-	switch v := seq.(type) {
-	case nil:
-		return nil, nil
-	case PagesGroup:
-		return v, nil
-	case []PageGroup:
-		return PagesGroup(v), nil
-	case []interface{}:
-		l := len(v)
-		if l == 0 {
-			break
-		}
-		switch v[0].(type) {
-		case PageGroup:
-			pagesGroup := make(PagesGroup, l)
-			for i, ipg := range v {
-				if pg, ok := ipg.(PageGroup); ok {
-					pagesGroup[i] = pg
-				} else {
-					return nil, fmt.Errorf("unsupported type in paginate from slice, got %T instead of PageGroup", ipg)
-				}
-			}
-			return PagesGroup(pagesGroup), nil
-		}
-	}
-
-	return nil, nil
-}
-
-func toPages(seq interface{}) (Pages, error) {
-	if seq == nil {
-		return Pages{}, nil
-	}
-
-	switch v := seq.(type) {
-	case Pages:
-		return v, nil
-	case *Pages:
-		return *(v), nil
-	case []*Page:
-		return Pages(v), nil
-	case WeightedPages:
-		return v.Pages(), nil
-	case PageGroup:
-		return v.Pages, nil
-	case []interface{}:
-		pages := make(Pages, len(v))
-		success := true
-		for i, vv := range v {
-			p, ok := vv.(*Page)
-			if !ok {
-				success = false
-				break
-			}
-			pages[i] = p
-		}
-		if success {
-			return pages, nil
-		}
-	}
-
-	return nil, fmt.Errorf("cannot convert type %T to Pages", seq)
+	return paginator, nil
 }
 
 // probablyEqual checks page lists for probable equality.
@@ -515,8 +327,8 @@ func probablyEqualPageLists(a1 interface{}, a2 interface{}) bool {
 		return g1[0].Pages[0] == g2[0].Pages[0]
 	}
 
-	p1, err1 := toPages(a1)
-	p2, err2 := toPages(a2)
+	p1, err1 := ToPages(a1)
+	p2, err2 := ToPages(a2)
 
 	// probably the same wrong type
 	if err1 != nil && err2 != nil {
@@ -534,7 +346,7 @@ func probablyEqualPageLists(a1 interface{}, a2 interface{}) bool {
 	return p1[0] == p2[0]
 }
 
-func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*paginator, error) {
+func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*Paginator, error) {
 
 	if size <= 0 {
 		return nil, errors.New("Paginator size must be positive")
@@ -545,7 +357,7 @@ func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactor
 	return newPaginator(split, len(pages), size, urlFactory)
 }
 
-func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*paginator, error) {
+func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*Paginator, error) {
 
 	if size <= 0 {
 		return nil, errors.New("Paginator size must be positive")
@@ -556,19 +368,19 @@ func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory pagi
 	return newPaginator(split, pageGroups.Len(), size, urlFactory)
 }
 
-func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*paginator, error) {
-	p := &paginator{total: total, paginatedElements: elements, size: size, paginationURLFactory: urlFactory}
+func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+	p := &Paginator{total: total, paginatedElements: elements, size: size, paginationURLFactory: urlFactory}
 
 	var ps pagers
 
 	if len(elements) > 0 {
 		ps = make(pagers, len(elements))
 		for i := range p.paginatedElements {
-			ps[i] = &Pager{number: (i + 1), paginator: p}
+			ps[i] = &Pager{number: (i + 1), Paginator: p}
 		}
 	} else {
 		ps = make(pagers, 1)
-		ps[0] = &Pager{number: 1, paginator: p}
+		ps[0] = &Pager{number: 1, Paginator: p}
 	}
 
 	p.pagers = ps
@@ -576,20 +388,17 @@ func newPaginator(elements []paginatedElement, total, size int, urlFactory pagin
 	return p, nil
 }
 
-func newPaginationURLFactory(d targetPathDescriptor) paginationURLFactory {
+func newPaginationURLFactory(d TargetPathDescriptor) paginationURLFactory {
 
-	return func(page int) string {
+	return func(pageNumber int) string {
 		pathDescriptor := d
 		var rel string
-		if page > 1 {
-			rel = fmt.Sprintf("/%s/%d/", d.PathSpec.PaginatePath, page)
+		if pageNumber > 1 {
+			rel = fmt.Sprintf("/%s/%d/", d.PathSpec.PaginatePath, pageNumber)
 			pathDescriptor.Addends = rel
 		}
 
-		targetPath := createTargetPath(pathDescriptor)
-		targetPath = strings.TrimSuffix(targetPath, d.Type.BaseFilename())
-		link := d.PathSpec.PrependBasePath(targetPath, false)
-		// Note: The targetPath is massaged with MakePathSanitized
-		return d.PathSpec.URLizeFilename(link)
+		return CreateTargetPaths(pathDescriptor).RelPermalink(d.PathSpec)
+
 	}
 }
diff --git a/resources/page/pagination_test.go b/resources/page/pagination_test.go
new file mode 100644
index 00000000000..1308d60d199
--- /dev/null
+++ b/resources/page/pagination_test.go
@@ -0,0 +1,307 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"html/template"
+	"testing"
+
+	"github.com/spf13/viper"
+
+	"github.com/gohugoio/hugo/output"
+	"github.com/stretchr/testify/require"
+)
+
+func TestSplitPages(t *testing.T) {
+	t.Parallel()
+
+	pages := createTestPages(21)
+	chunks := splitPages(pages, 5)
+	require.Equal(t, 5, len(chunks))
+
+	for i := 0; i < 4; i++ {
+		require.Equal(t, 5, chunks[i].Len())
+	}
+
+	lastChunk := chunks[4]
+	require.Equal(t, 1, lastChunk.Len())
+
+}
+
+func TestSplitPageGroups(t *testing.T) {
+	t.Parallel()
+	pages := createTestPages(21)
+	groups, _ := pages.GroupBy("Weight", "desc")
+	chunks := splitPageGroups(groups, 5)
+	require.Equal(t, 5, len(chunks))
+
+	firstChunk := chunks[0]
+
+	// alternate weight 5 and 10
+	if groups, ok := firstChunk.(PagesGroup); ok {
+		require.Equal(t, 5, groups.Len())
+		for _, pg := range groups {
+			// first group 10 in weight
+			require.Equal(t, 10, pg.Key)
+			for _, p := range pg.Pages {
+				require.True(t, p.FuzzyWordCount()%2 == 0) // magic test
+			}
+		}
+	} else {
+		t.Fatal("Expected PageGroup")
+	}
+
+	lastChunk := chunks[4]
+
+	if groups, ok := lastChunk.(PagesGroup); ok {
+		require.Equal(t, 1, groups.Len())
+		for _, pg := range groups {
+			// last should have 5 in weight
+			require.Equal(t, 5, pg.Key)
+			for _, p := range pg.Pages {
+				require.True(t, p.FuzzyWordCount()%2 != 0) // magic test
+			}
+		}
+	} else {
+		t.Fatal("Expected PageGroup")
+	}
+
+}
+
+func TestPager(t *testing.T) {
+	t.Parallel()
+	pages := createTestPages(21)
+	groups, _ := pages.GroupBy("Weight", "desc")
+
+	urlFactory := func(page int) string {
+		return fmt.Sprintf("page/%d/", page)
+	}
+
+	_, err := newPaginatorFromPages(pages, -1, urlFactory)
+	require.NotNil(t, err)
+
+	_, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
+	require.NotNil(t, err)
+
+	pag, err := newPaginatorFromPages(pages, 5, urlFactory)
+	require.Nil(t, err)
+	doTestPages(t, pag)
+	first := pag.Pagers()[0].First()
+	require.Equal(t, "Pager 1", first.String())
+	require.NotEmpty(t, first.Pages())
+	require.Empty(t, first.PageGroups())
+
+	pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
+	require.Nil(t, err)
+	doTestPages(t, pag)
+	first = pag.Pagers()[0].First()
+	require.NotEmpty(t, first.PageGroups())
+	require.Empty(t, first.Pages())
+
+}
+
+func doTestPages(t *testing.T, paginator *Paginator) {
+
+	paginatorPages := paginator.Pagers()
+
+	require.Equal(t, 5, len(paginatorPages))
+	require.Equal(t, 21, paginator.TotalNumberOfElements())
+	require.Equal(t, 5, paginator.PageSize())
+	require.Equal(t, 5, paginator.TotalPages())
+
+	first := paginatorPages[0]
+	require.Equal(t, template.HTML("page/1/"), first.URL())
+	require.Equal(t, first, first.First())
+	require.True(t, first.HasNext())
+	require.Equal(t, paginatorPages[1], first.Next())
+	require.False(t, first.HasPrev())
+	require.Nil(t, first.Prev())
+	require.Equal(t, 5, first.NumberOfElements())
+	require.Equal(t, 1, first.PageNumber())
+
+	third := paginatorPages[2]
+	require.True(t, third.HasNext())
+	require.True(t, third.HasPrev())
+	require.Equal(t, paginatorPages[1], third.Prev())
+
+	last := paginatorPages[4]
+	require.Equal(t, template.HTML("page/5/"), last.URL())
+	require.Equal(t, last, last.Last())
+	require.False(t, last.HasNext())
+	require.Nil(t, last.Next())
+	require.True(t, last.HasPrev())
+	require.Equal(t, 1, last.NumberOfElements())
+	require.Equal(t, 5, last.PageNumber())
+}
+
+func TestPagerNoPages(t *testing.T) {
+	t.Parallel()
+	pages := createTestPages(0)
+	groups, _ := pages.GroupBy("Weight", "desc")
+
+	urlFactory := func(page int) string {
+		return fmt.Sprintf("page/%d/", page)
+	}
+
+	paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
+	doTestPagerNoPages(t, paginator)
+
+	first := paginator.Pagers()[0].First()
+	require.Empty(t, first.PageGroups())
+	require.Empty(t, first.Pages())
+
+	paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
+	doTestPagerNoPages(t, paginator)
+
+	first = paginator.Pagers()[0].First()
+	require.Empty(t, first.PageGroups())
+	require.Empty(t, first.Pages())
+
+}
+
+func doTestPagerNoPages(t *testing.T, paginator *Paginator) {
+	paginatorPages := paginator.Pagers()
+
+	require.Equal(t, 1, len(paginatorPages))
+	require.Equal(t, 0, paginator.TotalNumberOfElements())
+	require.Equal(t, 5, paginator.PageSize())
+	require.Equal(t, 0, paginator.TotalPages())
+
+	// pageOne should be nothing but the first
+	pageOne := paginatorPages[0]
+	require.NotNil(t, pageOne.First())
+	require.False(t, pageOne.HasNext())
+	require.False(t, pageOne.HasPrev())
+	require.Nil(t, pageOne.Next())
+	require.Equal(t, 1, len(pageOne.Pagers()))
+	require.Equal(t, 0, pageOne.Pages().Len())
+	require.Equal(t, 0, pageOne.NumberOfElements())
+	require.Equal(t, 0, pageOne.TotalNumberOfElements())
+	require.Equal(t, 0, pageOne.TotalPages())
+	require.Equal(t, 1, pageOne.PageNumber())
+	require.Equal(t, 5, pageOne.PageSize())
+
+}
+
+func TestPaginationURLFactory(t *testing.T) {
+	t.Parallel()
+	cfg := viper.New()
+	cfg.Set("paginatePath", "zoo")
+
+	for _, uglyURLs := range []bool{false, true} {
+		t.Run(fmt.Sprintf("uglyURLs=%t", uglyURLs), func(t *testing.T) {
+
+			tests := []struct {
+				name         string
+				d            TargetPathDescriptor
+				baseURL      string
+				page         int
+				expected     string
+				expectedUgly string
+			}{
+				{"HTML home page 32",
+					TargetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/", 32, "/zoo/32/", "/zoo/32.html"},
+				{"JSON home page 42",
+					TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "http://example.com/", 42, "/zoo/42/index.json", "/zoo/42.json"},
+			}
+
+			for _, test := range tests {
+				d := test.d
+				cfg.Set("baseURL", test.baseURL)
+				cfg.Set("uglyURLs", uglyURLs)
+				d.UglyURLs = uglyURLs
+
+				pathSpec := newTestPathSpecFor(cfg)
+				d.PathSpec = pathSpec
+
+				factory := newPaginationURLFactory(d)
+
+				got := factory(test.page)
+
+				if uglyURLs {
+					require.Equal(t, test.expectedUgly, got)
+				} else {
+					require.Equal(t, test.expected, got)
+				}
+
+			}
+		})
+
+	}
+}
+
+func TestProbablyEqualPageLists(t *testing.T) {
+	t.Parallel()
+	fivePages := createTestPages(5)
+	zeroPages := createTestPages(0)
+	zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc")
+	fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc")
+	ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc")
+
+	for i, this := range []struct {
+		v1     interface{}
+		v2     interface{}
+		expect bool
+	}{
+		{nil, nil, true},
+		{"a", "b", true},
+		{"a", fivePages, false},
+		{fivePages, "a", false},
+		{fivePages, createTestPages(2), false},
+		{fivePages, fivePages, true},
+		{zeroPages, zeroPages, true},
+		{fivePagesByWeight, fivePagesByWeight, true},
+		{zeroPagesByWeight, fivePagesByWeight, false},
+		{zeroPagesByWeight, zeroPagesByWeight, true},
+		{fivePagesByWeight, fivePages, false},
+		{fivePagesByWeight, ninePagesByWeight, false},
+	} {
+		result := probablyEqualPageLists(this.v1, this.v2)
+
+		if result != this.expect {
+			t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
+
+		}
+	}
+}
+
+func TestPaginationPage(t *testing.T) {
+	t.Parallel()
+	urlFactory := func(page int) string {
+		return fmt.Sprintf("page/%d/", page)
+	}
+
+	fivePages := createTestPages(7)
+	fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
+
+	p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
+	p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
+
+	f1 := p1.pagers[0].First()
+	f2 := p2.pagers[0].First()
+
+	page11, _ := f1.page(1)
+	page1Nil, _ := f1.page(3)
+
+	page21, _ := f2.page(1)
+	page2Nil, _ := f2.page(3)
+
+	require.Equal(t, 3, page11.FuzzyWordCount())
+	require.Nil(t, page1Nil)
+
+	require.NotNil(t, page21)
+	require.Equal(t, 3, page21.FuzzyWordCount())
+	require.Nil(t, page2Nil)
+}
diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go
new file mode 100644
index 00000000000..98489231b2c
--- /dev/null
+++ b/resources/page/permalinks.go
@@ -0,0 +1,248 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"path/filepath"
+	"regexp"
+	"strconv"
+	"strings"
+
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/helpers"
+)
+
+// PermalinkExpander holds permalink mappings per section.
+type PermalinkExpander struct {
+	// knownPermalinkAttributes maps :tags in a permalink specification to a
+	// function which, given a page and the tag, returns the resulting string
+	// to be used to replace that tag.
+	knownPermalinkAttributes map[string]pageToPermaAttribute
+
+	expanders map[string]func(Page) (string, error)
+
+	ps *helpers.PathSpec
+}
+
+// NewPermalinkExpander creates a new PermalinkExpander configured by the given
+// PathSpec.
+func NewPermalinkExpander(ps *helpers.PathSpec) (PermalinkExpander, error) {
+
+	p := PermalinkExpander{ps: ps}
+
+	p.knownPermalinkAttributes = map[string]pageToPermaAttribute{
+		"year":        p.pageToPermalinkDate,
+		"month":       p.pageToPermalinkDate,
+		"monthname":   p.pageToPermalinkDate,
+		"day":         p.pageToPermalinkDate,
+		"weekday":     p.pageToPermalinkDate,
+		"weekdayname": p.pageToPermalinkDate,
+		"yearday":     p.pageToPermalinkDate,
+		"section":     p.pageToPermalinkSection,
+		"sections":    p.pageToPermalinkSections,
+		"title":       p.pageToPermalinkTitle,
+		"slug":        p.pageToPermalinkSlugElseTitle,
+		"filename":    p.pageToPermalinkFilename,
+	}
+
+	patterns := ps.Cfg.GetStringMapString("permalinks")
+	if patterns == nil {
+		return p, nil
+	}
+
+	e, err := p.parse(patterns)
+	if err != nil {
+		return p, err
+	}
+
+	p.expanders = e
+
+	return p, nil
+}
+
+// Expand expands the path in p according to the rules defined for the given key.
+// If no rules are found for the given key, an empty string is returned.
+func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
+	expand, found := l.expanders[key]
+
+	if !found {
+		return "", nil
+	}
+
+	return expand(p)
+
+}
+
+func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) {
+
+	expanders := make(map[string]func(Page) (string, error))
+
+	for k, pattern := range patterns {
+		if !l.validate(pattern) {
+			return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkIllFormed}
+		}
+
+		pattern := pattern
+		matches := attributeRegexp.FindAllStringSubmatch(pattern, -1)
+
+		callbacks := make([]pageToPermaAttribute, len(matches))
+		replacements := make([]string, len(matches))
+		for i, m := range matches {
+			replacement := m[0]
+			attr := replacement[1:]
+			replacements[i] = replacement
+			callback, ok := l.knownPermalinkAttributes[attr]
+
+			if !ok {
+				return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkAttributeUnknown}
+			}
+
+			callbacks[i] = callback
+		}
+
+		expanders[k] = func(p Page) (string, error) {
+
+			if matches == nil {
+				return pattern, nil
+			}
+
+			newField := pattern
+
+			for i, replacement := range replacements {
+				attr := replacement[1:]
+				callback := callbacks[i]
+				newAttr, err := callback(p, attr)
+
+				if err != nil {
+					return "", &permalinkExpandError{pattern: pattern, err: err}
+				}
+
+				newField = strings.Replace(newField, replacement, newAttr, 1)
+
+			}
+
+			return newField, nil
+
+		}
+
+	}
+
+	return expanders, nil
+}
+
+// pageToPermaAttribute is the type of a function which, given a page and a tag
+// can return a string to go in that position in the page (or an error)
+type pageToPermaAttribute func(Page, string) (string, error)
+
+var attributeRegexp = regexp.MustCompile(`:\w+`)
+
+// validate determines if a PathPattern is well-formed
+func (l PermalinkExpander) validate(pp string) bool {
+	fragments := strings.Split(pp[1:], "/")
+	var bail = false
+	for i := range fragments {
+		if bail {
+			return false
+		}
+		if len(fragments[i]) == 0 {
+			bail = true
+			continue
+		}
+
+		matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
+		if matches == nil {
+			continue
+		}
+
+		for _, match := range matches {
+			k := strings.ToLower(match[0][1:])
+			if _, ok := l.knownPermalinkAttributes[k]; !ok {
+				return false
+			}
+		}
+	}
+	return true
+}
+
+type permalinkExpandError struct {
+	pattern string
+	err     error
+}
+
+func (pee *permalinkExpandError) Error() string {
+	return fmt.Sprintf("error expanding %q: %s", string(pee.pattern), pee.err)
+}
+
+var (
+	errPermalinkIllFormed        = errors.New("permalink ill-formed")
+	errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
+)
+
+func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string, error) {
+	// a Page contains a Node which provides a field Date, time.Time
+	switch dateField {
+	case "year":
+		return strconv.Itoa(p.Date().Year()), nil
+	case "month":
+		return fmt.Sprintf("%02d", int(p.Date().Month())), nil
+	case "monthname":
+		return p.Date().Month().String(), nil
+	case "day":
+		return fmt.Sprintf("%02d", p.Date().Day()), nil
+	case "weekday":
+		return strconv.Itoa(int(p.Date().Weekday())), nil
+	case "weekdayname":
+		return p.Date().Weekday().String(), nil
+	case "yearday":
+		return strconv.Itoa(p.Date().YearDay()), nil
+	}
+	//TODO: support classic strftime escapes too
+	// (and pass those through despite not being in the map)
+	panic("coding error: should not be here")
+}
+
+// pageToPermalinkTitle returns the URL-safe form of the title
+func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) {
+	return l.ps.URLize(p.Title()), nil
+}
+
+// pageToPermalinkFilename returns the URL-safe form of the filename
+func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, error) {
+	name := p.File().TranslationBaseName()
+	if name == "index" {
+		// Page bundles; the directory name will hopefully have a better name.
+		dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
+		_, name = filepath.Split(dir)
+	}
+
+	return l.ps.URLize(name), nil
+}
+
+// if the page has a slug, return the slug, else return the title
+func (l PermalinkExpander) pageToPermalinkSlugElseTitle(p Page, a string) (string, error) {
+	if p.Slug() != "" {
+		return l.ps.URLize(p.Slug()), nil
+	}
+	return l.pageToPermalinkTitle(p, a)
+}
+
+func (l PermalinkExpander) pageToPermalinkSection(p Page, _ string) (string, error) {
+	return p.Section(), nil
+}
+
+func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, error) {
+	return p.CurrentSection().SectionsPath(), nil
+}
diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go
new file mode 100644
index 00000000000..d7af7e06da4
--- /dev/null
+++ b/resources/page/permalinks_test.go
@@ -0,0 +1,180 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/require"
+)
+
+// testdataPermalinks is used by a couple of tests; the expandsTo content is
+// subject to the data in simplePageJSON.
+var testdataPermalinks = []struct {
+	spec      string // permalink pattern under test
+	valid     bool   // whether the pattern is expected to be accepted
+	expandsTo string // expected expansion for valid patterns
+}{
+	{":title", true, "spf13-vim-3.0-release-and-new-website"},
+	{"/:year-:month-:title", true, "/2012-04-spf13-vim-3.0-release-and-new-website"},
+	{"/:year/:yearday/:month/:monthname/:day/:weekday/:weekdayname/", true, "/2012/97/04/April/06/5/Friday/"}, // Dates
+	{"/:section/", true, "/blue/"},                                // Section
+	{"/:title/", true, "/spf13-vim-3.0-release-and-new-website/"}, // Title
+	{"/:slug/", true, "/the-slug/"},                               // Slug
+	{"/:filename/", true, "/test-page/"},                          // Filename
+	// TODO(moorereason): need test scaffolding for this.
+	//{"/:sections/", false, "/blue/"},                              // Sections
+
+	// Failures
+	{"/blog/:fred", false, ""},
+	{"/:year//:title", false, ""},
+}
+
+// TestPermalinkExpansion checks each entry in testdataPermalinks: valid
+// patterns must expand to the expected path for a known test page, and
+// invalid patterns must be rejected when the expander is constructed.
+func TestPermalinkExpansion(t *testing.T) {
+	t.Parallel()
+
+	assert := require.New(t)
+
+	// A page with fixed title, date, section and slug so every permalink
+	// token expands to a predictable value.
+	page := newTestPageWithFile("/test-page/index.md")
+	page.title = "Spf13 Vim 3.0 Release and new website"
+	d, _ := time.Parse("2006-01-02", "2012-04-06")
+	page.date = d
+	page.section = "blue"
+	page.slug = "The Slug"
+
+	for i, item := range testdataPermalinks {
+
+		msg := fmt.Sprintf("Test %d", i)
+
+		permalinksConfig := map[string]string{
+			"posts": item.spec,
+		}
+
+		ps := newTestPathSpec()
+		ps.Cfg.Set("permalinks", permalinksConfig)
+
+		expander, err := NewPermalinkExpander(ps)
+		if !item.valid {
+			// Previously these entries were skipped; assert that the
+			// invalid pattern is actually rejected.
+			assert.Error(err, msg)
+			continue
+		}
+		assert.NoError(err, msg)
+
+		expanded, err := expander.Expand("posts", page)
+		assert.NoError(err, msg)
+		assert.Equal(item.expandsTo, expanded, msg)
+
+	}
+}
+
+// TestPermalinkExpansionMultiSection verifies that different sections can
+// carry different permalink patterns for the same page.
+func TestPermalinkExpansionMultiSection(t *testing.T) {
+	t.Parallel()
+
+	assert := require.New(t)
+
+	page := newTestPage()
+	page.title = "Page Title"
+	d, _ := time.Parse("2006-01-02", "2012-04-06")
+	page.date = d
+	page.section = "blue"
+	page.slug = "The Slug"
+
+	ps := newTestPathSpec()
+	ps.Cfg.Set("permalinks", map[string]string{
+		"posts": "/:slug",
+		"blog":  "/:section/:year",
+	})
+
+	expander, err := NewPermalinkExpander(ps)
+	assert.NoError(err)
+
+	for section, expect := range map[string]string{
+		"posts": "/the-slug",
+		"blog":  "/blue/2012",
+	} {
+		expanded, err := expander.Expand(section, page)
+		assert.NoError(err)
+		assert.Equal(expect, expanded)
+	}
+}
+
+// TestPermalinkExpansionConcurrent exercises Expand from many goroutines
+// in parallel to flush out data races in the expander (run with -race).
+func TestPermalinkExpansionConcurrent(t *testing.T) {
+	t.Parallel()
+
+	assert := require.New(t)
+
+	permalinksConfig := map[string]string{
+		"posts": "/:slug/",
+	}
+
+	ps := newTestPathSpec()
+	ps.Cfg.Set("permalinks", permalinksConfig)
+
+	expander, err := NewPermalinkExpander(ps)
+	assert.NoError(err)
+
+	var wg sync.WaitGroup
+
+	for i := 1; i < 20; i++ {
+		wg.Add(1)
+		go func(i int) {
+			defer wg.Done()
+			// Each goroutine gets its own page; only the shared expander
+			// is accessed concurrently.
+			page := newTestPage()
+			for j := 1; j < 20; j++ {
+				page.slug = fmt.Sprintf("slug%d", i+j)
+				expanded, err := expander.Expand("posts", page)
+				assert.NoError(err)
+				assert.Equal(fmt.Sprintf("/%s/", page.slug), expanded)
+			}
+		}(i)
+	}
+
+	wg.Wait()
+}
+
+// BenchmarkPermalinkExpand measures the cost of expanding a date+title
+// pattern for a single page; the result is verified on every iteration.
+func BenchmarkPermalinkExpand(b *testing.B) {
+	page := newTestPage()
+	page.title = "Hugo Rocks"
+	d, _ := time.Parse("2006-01-02", "2019-02-28")
+	page.date = d
+
+	permalinksConfig := map[string]string{
+		"posts": "/:year-:month-:title",
+	}
+
+	ps := newTestPathSpec()
+	ps.Cfg.Set("permalinks", permalinksConfig)
+
+	expander, err := NewPermalinkExpander(ps)
+	if err != nil {
+		b.Fatal(err)
+	}
+
+	// Setup done; time only the Expand calls.
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		s, err := expander.Expand("posts", page)
+		if err != nil {
+			b.Fatal(err)
+		}
+		if s != "/2019-02-hugo-rocks" {
+			b.Fatal(s)
+		}
+
+	}
+}
diff --git a/resources/page/site.go b/resources/page/site.go
new file mode 100644
index 00000000000..25df063f1b1
--- /dev/null
+++ b/resources/page/site.go
@@ -0,0 +1,53 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"html/template"
+	"time"
+
+	"github.com/gohugoio/hugo/common/hugo"
+	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/navigation"
+)
+
+// Site represents a site in the build. This is currently a very narrow interface,
+// but the actual implementation will be richer, see hugolib.SiteInfo.
+type Site interface {
+	Language() *langs.Language
+	RegularPages() Pages
+	Pages() Pages
+	IsServer() bool
+	ServerPort() int
+	Title() string
+	// Sites returns all sites (languages) in the build.
+	Sites() Sites
+	Hugo() hugo.Info
+	BaseURL() template.URL
+	// Taxonomies is typed interface{}; the concrete type lives in hugolib.
+	Taxonomies() interface{}
+	LastChange() time.Time
+	Menus() navigation.Menus
+	Params() map[string]interface{}
+	Data() map[string]interface{}
+}
+
+// Sites represents an ordered list of sites (languages).
+type Sites []Site
+
+// First is a convenience method to get the first Site, i.e. the main language.
+// It returns nil when the list is empty.
+func (s Sites) First() Site {
+	if len(s) > 0 {
+		return s[0]
+	}
+	return nil
+}
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
new file mode 100644
index 00000000000..c2bcca0a5bc
--- /dev/null
+++ b/resources/page/testhelpers_test.go
@@ -0,0 +1,554 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"html/template"
+	"os"
+	"path/filepath"
+	"time"
+
+	"github.com/bep/gitmap"
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/resources/resource"
+	"github.com/spf13/viper"
+
+	"github.com/gohugoio/hugo/navigation"
+
+	"github.com/gohugoio/hugo/common/hugo"
+	"github.com/gohugoio/hugo/common/maps"
+	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/hugofs"
+	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/media"
+	"github.com/gohugoio/hugo/related"
+
+	"github.com/gohugoio/hugo/source"
+)
+
+// Compile-time checks that testPage satisfies the interfaces under test.
+var (
+	_ resource.LengthProvider = (*testPage)(nil)
+	_ Page                    = (*testPage)(nil)
+)
+
+// relatedDocsHandler is shared by all testPage instances in this package.
+var relatedDocsHandler = NewRelatedDocsHandler(related.DefaultConfig)
+
+// newTestPage creates a testPage backed by a default content filename.
+func newTestPage() *testPage {
+	return newTestPageWithFile("/a/b/c.md")
+}
+
+// newTestPageWithFile creates a testPage backed by the given (slash-style)
+// content filename, with empty params and data maps ready for use.
+func newTestPageWithFile(filename string) *testPage {
+	filename = filepath.FromSlash(filename)
+	file := source.NewTestFile(filename)
+	return &testPage{
+		params: make(map[string]interface{}),
+		data:   make(map[string]interface{}),
+		file:   file,
+	}
+}
+
+// newTestPathSpec creates a PathSpec from a fresh viper config.
+func newTestPathSpec() *helpers.PathSpec {
+	return newTestPathSpecFor(viper.New())
+}
+
+// newTestPathSpecFor creates a PathSpec for cfg backed by an in-memory
+// filesystem. It panics on error, as it is only used from tests.
+func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec {
+	config.SetBaseTestDefaults(cfg)
+	fs := hugofs.NewMem(cfg)
+	s, err := helpers.NewPathSpec(fs, cfg)
+	if err != nil {
+		panic(err)
+	}
+	return s
+}
+
+// testPage is a minimal in-memory Page implementation for the tests in
+// this package. Only the fields the tests exercise are populated; most
+// Page methods below panic when called.
+type testPage struct {
+	description string
+	title       string
+	linkTitle   string
+
+	section string
+
+	content string
+
+	fuzzyWordCount int
+
+	path string
+
+	slug string
+
+	// Dates
+	date       time.Time
+	lastMod    time.Time
+	expiryDate time.Time
+	pubDate    time.Time
+
+	weight int
+
+	params map[string]interface{}
+	data   map[string]interface{}
+
+	file source.File
+}
+
+// The methods below implement the Page interface for testPage. Methods
+// no test in this package relies on simply panic when called.
+
+func (p *testPage) Aliases() []string {
+	panic("not implemented")
+}
+
+func (p *testPage) AllTranslations() Pages {
+	panic("not implemented")
+}
+
+func (p *testPage) AlternativeOutputFormats() OutputFormats {
+	panic("not implemented")
+}
+
+// Author returns the zero Author value.
+func (p *testPage) Author() Author {
+	return Author{}
+
+}
+func (p *testPage) Authors() AuthorList {
+	return nil
+}
+
+func (p *testPage) BaseFileName() string {
+	panic("not implemented")
+}
+
+func (p *testPage) BundleType() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Content() (interface{}, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) ContentBaseName() string {
+	panic("not implemented")
+}
+
+func (p *testPage) CurrentSection() Page {
+	panic("not implemented")
+}
+
+func (p *testPage) Data() interface{} {
+	return p.data
+}
+
+func (p *testPage) Sitemap() config.Sitemap {
+	return config.Sitemap{}
+}
+
+func (p *testPage) Layout() string {
+	return ""
+}
+func (p *testPage) Date() time.Time {
+	return p.date
+}
+
+func (p *testPage) Description() string {
+	return ""
+}
+
+func (p *testPage) Dir() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Draft() bool {
+	panic("not implemented")
+}
+
+// Eq compares by pointer identity.
+func (p *testPage) Eq(other interface{}) bool {
+	return p == other
+}
+
+func (p *testPage) ExpiryDate() time.Time {
+	return p.expiryDate
+}
+
+func (p *testPage) Ext() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Extension() string {
+	panic("not implemented")
+}
+
+func (p *testPage) File() source.File {
+	return p.file
+}
+
+// More Page interface methods; unused ones panic.
+
+func (p *testPage) FileInfo() os.FileInfo {
+	panic("not implemented")
+}
+
+func (p *testPage) Filename() string {
+	panic("not implemented")
+}
+
+func (p *testPage) FirstSection() Page {
+	panic("not implemented")
+}
+
+func (p *testPage) FuzzyWordCount() int {
+	return p.fuzzyWordCount
+}
+
+func (p *testPage) GetPage(ref string) (Page, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) GetParam(key string) interface{} {
+	panic("not implemented")
+}
+
+// GetRelatedDocsHandler returns the package-level shared handler.
+func (p *testPage) GetRelatedDocsHandler() *RelatedDocsHandler {
+	return relatedDocsHandler
+}
+
+func (p *testPage) GitInfo() *gitmap.GitInfo {
+	return nil
+}
+
+func (p *testPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+	panic("not implemented")
+}
+
+func (p *testPage) HasShortcode(name string) bool {
+	panic("not implemented")
+}
+
+func (p *testPage) Hugo() hugo.Info {
+	panic("not implemented")
+}
+
+func (p *testPage) InSection(other interface{}) (bool, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) IsAncestor(other interface{}) (bool, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) IsDescendant(other interface{}) (bool, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) IsDraft() bool {
+	return false
+}
+
+func (p *testPage) IsHome() bool {
+	panic("not implemented")
+}
+
+func (p *testPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+	panic("not implemented")
+}
+
+func (p *testPage) IsNode() bool {
+	panic("not implemented")
+}
+
+func (p *testPage) IsPage() bool {
+	panic("not implemented")
+}
+
+func (p *testPage) IsSection() bool {
+	panic("not implemented")
+}
+
+func (p *testPage) IsTranslated() bool {
+	panic("not implemented")
+}
+
+func (p *testPage) Keywords() []string {
+	return nil
+}
+
+func (p *testPage) Kind() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Lang() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Language() *langs.Language {
+	panic("not implemented")
+}
+
+func (p *testPage) LanguagePrefix() string {
+	return ""
+}
+
+func (p *testPage) Lastmod() time.Time {
+	return p.lastMod
+}
+
+// Len reports the length of the raw content (resource.LengthProvider).
+func (p *testPage) Len() int {
+	return len(p.content)
+}
+
+// LinkTitle falls back to the title when no link title is set.
+func (p *testPage) LinkTitle() string {
+	if p.linkTitle == "" {
+		return p.title
+	}
+	return p.linkTitle
+}
+
+func (p *testPage) LogicalName() string {
+	panic("not implemented")
+}
+
+func (p *testPage) MediaType() media.Type {
+	panic("not implemented")
+}
+
+func (p *testPage) Menus() navigation.PageMenus {
+	return navigation.PageMenus{}
+}
+
+func (p *testPage) Name() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Next() Page {
+	panic("not implemented")
+}
+
+func (p *testPage) NextInSection() Page {
+	return nil
+}
+
+func (p *testPage) NextPage() Page {
+	return nil
+}
+
+func (p *testPage) OutputFormats() OutputFormats {
+	panic("not implemented")
+}
+
+func (p *testPage) Pages() Pages {
+	panic("not implemented")
+}
+
+func (p *testPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
+	return nil, nil
+}
+
+func (p *testPage) Paginator(options ...interface{}) (*Pager, error) {
+	return nil, nil
+}
+
+// Param delegates to the shared Param helper with no fallback map.
+func (p *testPage) Param(key interface{}) (interface{}, error) {
+	return resource.Param(p, nil, key)
+}
+
+func (p *testPage) Params() map[string]interface{} {
+	return p.params
+}
+
+func (p *testPage) Parent() Page {
+	panic("not implemented")
+}
+
+func (p *testPage) Path() string {
+	return p.path
+}
+
+func (p *testPage) Permalink() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Plain() string {
+	panic("not implemented")
+}
+
+func (p *testPage) PlainWords() []string {
+	panic("not implemented")
+}
+
+func (p *testPage) Prev() Page {
+	panic("not implemented")
+}
+
+func (p *testPage) PrevInSection() Page {
+	return nil
+}
+
+func (p *testPage) PrevPage() Page {
+	return nil
+}
+
+func (p *testPage) PublishDate() time.Time {
+	return p.pubDate
+}
+
+func (p *testPage) RSSLink() template.URL {
+	return ""
+}
+
+func (p *testPage) RawContent() string {
+	panic("not implemented")
+}
+
+func (p *testPage) ReadingTime() int {
+	panic("not implemented")
+}
+
+func (p *testPage) Ref(argsm map[string]interface{}) (string, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return "", nil
+}
+
+func (p *testPage) RelPermalink() string {
+	panic("not implemented")
+}
+
+func (p *testPage) RelRef(argsm map[string]interface{}) (string, error) {
+	panic("not implemented")
+}
+
+func (p *testPage) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+	return "", nil
+}
+
+func (p *testPage) Render(layout ...string) template.HTML {
+	panic("not implemented")
+}
+
+func (p *testPage) ResourceType() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Resources() resource.Resources {
+	panic("not implemented")
+}
+
+func (p *testPage) Scratch() *maps.Scratch {
+	panic("not implemented")
+}
+
+// RelatedKeywords derives keywords for the given related-content index
+// from the param named after the index.
+func (p *testPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+	v, err := p.Param(cfg.Name)
+	if err != nil {
+		return nil, err
+	}
+
+	return cfg.ToKeywords(v)
+}
+
+func (p *testPage) Section() string {
+	return p.section
+}
+
+func (p *testPage) Sections() Pages {
+	panic("not implemented")
+}
+
+func (p *testPage) SectionsEntries() []string {
+	panic("not implemented")
+}
+
+func (p *testPage) SectionsPath() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Site() Site {
+	panic("not implemented")
+}
+
+func (p *testPage) Sites() Sites {
+	panic("not implemented")
+}
+
+func (p *testPage) Slug() string {
+	return p.slug
+}
+
+// String returns the page's path.
+func (p *testPage) String() string {
+	return p.path
+}
+
+func (p *testPage) Summary() template.HTML {
+	panic("not implemented")
+}
+
+func (p *testPage) TableOfContents() template.HTML {
+	panic("not implemented")
+}
+
+func (p *testPage) Title() string {
+	return p.title
+}
+
+func (p *testPage) TranslationBaseName() string {
+	panic("not implemented")
+}
+
+// TranslationKey reuses the path as a stand-in translation key.
+func (p *testPage) TranslationKey() string {
+	return p.path
+}
+
+func (p *testPage) Translations() Pages {
+	panic("not implemented")
+}
+
+func (p *testPage) Truncated() bool {
+	panic("not implemented")
+}
+
+// Type reuses the section as the page type.
+func (p *testPage) Type() string {
+	return p.section
+}
+
+func (p *testPage) URL() string {
+	return ""
+}
+
+func (p *testPage) UniqueID() string {
+	panic("not implemented")
+}
+
+func (p *testPage) Weight() int {
+	return p.weight
+}
+
+func (p *testPage) WordCount() int {
+	panic("not implemented")
+}
+
+// createTestPages creates num test pages with deterministic paths,
+// weights (10 for even indices, 5 for odd) and fuzzy word counts (i+2).
+func createTestPages(num int) Pages {
+	pages := make(Pages, num)
+
+	for i := range pages {
+		weight := 5
+		if i%2 == 0 {
+			weight = 10
+		}
+		pages[i] = &testPage{
+			path:           fmt.Sprintf("/x/y/z/p%d.md", i),
+			weight:         weight,
+			fuzzyWordCount: i + 2, // magic
+		}
+	}
+
+	return pages
+}
diff --git a/resources/page/weighted.go b/resources/page/weighted.go
new file mode 100644
index 00000000000..0937b3f8617
--- /dev/null
+++ b/resources/page/weighted.go
@@ -0,0 +1,140 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+	"fmt"
+	"sort"
+
+	"github.com/gohugoio/hugo/common/collections"
+)
+
+// Compile-time check: WeightedPage implements collections.Slicer.
+var (
+	_ collections.Slicer = WeightedPage{}
+)
+
+// WeightedPages is a list of Pages with their corresponding (and relative) weight
+// [{Weight: 30, Page: *1}, {Weight: 40, Page: *2}]
+type WeightedPages []WeightedPage
+
+// Page will return the Page (of Kind taxonomyList) that represents this set
+// of pages. This method will panic if p is empty, as that should never happen.
+func (p WeightedPages) Page() Page {
+	if len(p) == 0 {
+		panic("WeightedPages is empty")
+	}
+
+	first := p[0]
+
+	// TODO(bep) fix tests
+	// Entries created without an owner callback yield a nil Page here.
+	if first.getOwner == nil {
+		return nil
+	}
+
+	return first.getOwner()
+}
+
+// A WeightedPage is a Page with a weight.
+type WeightedPage struct {
+	Weight int
+	Page
+
+	// A callback used to fetch the owning Page. This avoids having to do
+	// manual .Site.GetPage lookups. It is implemented in this roundabout way
+	// because we cannot add additional state to the WeightedPages slice
+	// without breaking lots of templates in the wild.
+	getOwner func() Page
+}
+
+// NewWeightedPage creates a WeightedPage for p with the given weight and
+// owner-lookup callback.
+func NewWeightedPage(weight int, p Page, getOwner func() Page) WeightedPage {
+	return WeightedPage{Weight: weight, Page: p, getOwner: getOwner}
+}
+
+// String implements fmt.Stringer for logging/debugging.
+func (w WeightedPage) String() string {
+	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title())
+}
+
+// Slice is not meant to be used externally. It's a bridge function
+// for the template functions. See collections.Slice.
+func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
+	switch items := in.(type) {
+	case WeightedPages:
+		// Already the right type; pass through.
+		return items, nil
+	case []interface{}:
+		// Convert element by element, failing on the first non-WeightedPage.
+		weighted := make(WeightedPages, len(items))
+		for i, v := range items {
+			g, ok := v.(WeightedPage)
+			if !ok {
+				return nil, fmt.Errorf("type %T is not a WeightedPage", v)
+			}
+			weighted[i] = g
+		}
+		return weighted, nil
+	default:
+		return nil, fmt.Errorf("invalid slice type %T", items)
+	}
+}
+
+// Pages returns the Pages in this weighted page set, in order,
+// stripped of their weights.
+func (wp WeightedPages) Pages() Pages {
+	pages := make(Pages, 0, len(wp))
+	for _, w := range wp {
+		pages = append(pages, w.Page)
+	}
+	return pages
+}
+
+// Prev returns the previous Page relative to the given Page in
+// this weighted page set. It wraps around: the previous of the first
+// element is the last. Returns nil if cur is not in the set.
+func (wp WeightedPages) Prev(cur Page) Page {
+	for x, c := range wp {
+		if c.Page == cur {
+			if x == 0 {
+				// Wrap to the end.
+				return wp[len(wp)-1].Page
+			}
+			return wp[x-1].Page
+		}
+	}
+	return nil
+}
+
+// Next returns the next Page relative to the given Page in
+// this weighted page set. It wraps around: the next of the last
+// element is the first. Returns nil if cur is not in the set.
+func (wp WeightedPages) Next(cur Page) Page {
+	for x, c := range wp {
+		if c.Page == cur {
+			if x < len(wp)-1 {
+				return wp[x+1].Page
+			}
+			// Wrap to the start.
+			return wp[0].Page
+		}
+	}
+	return nil
+}
+
+// Len and Swap are part of sort.Interface.
+func (wp WeightedPages) Len() int      { return len(wp) }
+func (wp WeightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }
+
+// Sort stable sorts this weighted page set.
+func (wp WeightedPages) Sort() { sort.Stable(wp) }
+
+// Count returns the number of pages in this weighted page set.
+func (wp WeightedPages) Count() int { return len(wp) }
+
+// Less orders by weight, falling back to the default page sort when
+// weights are equal; part of sort.Interface.
+func (wp WeightedPages) Less(i, j int) bool {
+	if wp[i].Weight == wp[j].Weight {
+		return DefaultPageSort(wp[i].Page, wp[j].Page)
+	}
+	return wp[i].Weight < wp[j].Weight
+}
diff --git a/resources/resource.go b/resources/resource.go
index 742903e80a0..abd25154856 100644
--- a/resources/resource.go
+++ b/resources/resource.go
@@ -34,6 +34,7 @@ import (
 	"github.com/gohugoio/hugo/common/collections"
 	"github.com/gohugoio/hugo/common/hugio"
 	"github.com/gohugoio/hugo/common/loggers"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/resources/resource"
 
 	"github.com/spf13/afero"
@@ -61,7 +62,7 @@ type permalinker interface {
 	permalinkFor(target string) string
 	relTargetPathsFor(target string) []string
 	relTargetPaths() []string
-	targetPath() string
+	TargetPath() string
 }
 
 type Spec struct {
@@ -74,6 +75,8 @@ type Spec struct {
 
 	TextTemplates tpl.TemplateParseFinder
 
+	Permalinks page.PermalinkExpander
+
 	// Holds default filter settings etc.
 	imaging *Imaging
 
@@ -98,11 +101,17 @@ func NewSpec(
 		logger = loggers.NewErrorLogger()
 	}
 
+	permalinks, err := page.NewPermalinkExpander(s)
+	if err != nil {
+		return nil, err
+	}
+
 	rs := &Spec{PathSpec: s,
 		Logger:        logger,
 		imaging:       &imaging,
 		MediaTypes:    mimeTypes,
 		OutputFormats: outputFormats,
+		Permalinks:    permalinks,
 		FileCaches:    fileCaches,
 		imageCache: newImageCache(
 			fileCaches.ImageCache(),
@@ -117,8 +126,8 @@ func NewSpec(
 }
 
 type ResourceSourceDescriptor struct {
-	// TargetPathBuilder is a callback to create target paths's relative to its owner.
-	TargetPathBuilder func(base string) string
+	// TargetPaths is a callback to fetch paths's relative to its owner.
+	TargetPaths func() page.TargetPaths
 
 	// Need one of these to load the resource content.
 	SourceFile         source.File
@@ -130,10 +139,6 @@ type ResourceSourceDescriptor struct {
 	// The relative target filename without any language code.
 	RelTargetFilename string
 
-	// Any base path prepeneded to the permalink.
-	// Typically the language code if this resource should be published to its sub-folder.
-	URLBase string
-
 	// Any base paths prepended to the target path. This will also typically be the
 	// language code, but setting it here means that it should not have any effect on
 	// the permalink.
@@ -216,6 +221,9 @@ func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (reso
 	}
 
 	if !found {
+		// A fallback. Note that mime.TypeByExtension is slow by Hugo standards,
+		// so we should configure media types to avoid this lookup for most
+		// situations.
 		mimeStr := mime.TypeByExtension(ext)
 		if mimeStr != "" {
 			mimeType, _ = media.FromStringAndExt(mimeStr, ext)
@@ -226,9 +234,8 @@ func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (reso
 		sourceFs,
 		fd.LazyPublish,
 		fd.OpenReadSeekCloser,
-		fd.URLBase,
 		fd.TargetBasePaths,
-		fd.TargetPathBuilder,
+		fd.TargetPaths,
 		fi,
 		sourceFilename,
 		fd.RelTargetFilename,
@@ -307,11 +314,7 @@ type resourcePathDescriptor struct {
 	relTargetDirFile dirFile
 
 	// Callback used to construct a target path relative to its owner.
-	targetPathBuilder func(rel string) string
-
-	// baseURLDir is the fixed sub-folder for a resource in permalinks. This will typically
-	// be the language code if we publish to the language's sub-folder.
-	baseURLDir string
+	targetPathBuilder func() page.TargetPaths
 
 	// This will normally be the same as above, but this will only apply to publishing
 	// of resources. It may be mulltiple values when in multihost mode.
@@ -531,7 +534,7 @@ func (l *genericResource) relTargetPathsFor(target string) []string {
 }
 
 func (l *genericResource) relTargetPaths() []string {
-	return l.relTargetPathsForRel(l.targetPath())
+	return l.relTargetPathsForRel(l.TargetPath())
 }
 
 func (l *genericResource) Name() string {
@@ -596,15 +599,23 @@ func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isA
 	return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL)
 }
 
-func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string {
-	if l.targetPathBuilder != nil {
-		rel = l.targetPathBuilder(rel)
+func (l *genericResource) createBasePath(rel string, isURL bool) string {
+	if l.targetPathBuilder == nil {
+		return rel
 	}
+	tp := l.targetPathBuilder()
 
-	if isURL && l.baseURLDir != "" {
-		rel = path.Join(l.baseURLDir, rel)
+	if isURL {
+		return path.Join(tp.SubResourceBaseLink, rel)
 	}
 
+	// TODO(bep) path
+	return path.Join(filepath.ToSlash(tp.SubResourceBaseTarget), rel)
+}
+
+func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string {
+	rel = l.createBasePath(rel, isURL)
+
 	if basePath != "" {
 		rel = path.Join(basePath, rel)
 	}
@@ -641,6 +652,7 @@ func (l *genericResource) Publish() error {
 		return err
 	}
 	defer fr.Close()
+
 	fw, err := helpers.OpenFilesForWriting(l.spec.BaseFs.PublishFs, l.targetFilenames()...)
 	if err != nil {
 		return err
@@ -652,7 +664,7 @@ func (l *genericResource) Publish() error {
 }
 
 // Path is stored with Unix style slashes.
-func (l *genericResource) targetPath() string {
+func (l *genericResource) TargetPath() string {
 	return l.relTargetDirFile.path()
 }
 
@@ -666,7 +678,7 @@ func (l *genericResource) targetFilenames() []string {
 
 // TODO(bep) clean up below
 func (r *Spec) newGenericResource(sourceFs afero.Fs,
-	targetPathBuilder func(base string) string,
+	targetPathBuilder func() page.TargetPaths,
 	osFileInfo os.FileInfo,
 	sourceFilename,
 	baseFilename string,
@@ -675,7 +687,6 @@ func (r *Spec) newGenericResource(sourceFs afero.Fs,
 		sourceFs,
 		false,
 		nil,
-		"",
 		nil,
 		targetPathBuilder,
 		osFileInfo,
@@ -690,9 +701,8 @@ func (r *Spec) newGenericResourceWithBase(
 	sourceFs afero.Fs,
 	lazyPublish bool,
 	openReadSeekerCloser resource.OpenReadSeekCloser,
-	urlBaseDir string,
 	targetPathBaseDirs []string,
-	targetPathBuilder func(base string) string,
+	targetPathBuilder func() page.TargetPaths,
 	osFileInfo os.FileInfo,
 	sourceFilename,
 	baseFilename string,
@@ -711,8 +721,7 @@ func (r *Spec) newGenericResourceWithBase(
 	}
 
 	pathDescriptor := resourcePathDescriptor{
-		baseURLDir:         urlBaseDir,
-		baseTargetPathDirs: targetPathBaseDirs,
+		baseTargetPathDirs: helpers.UniqueStrings(targetPathBaseDirs),
 		targetPathBuilder:  targetPathBuilder,
 		relTargetDirFile:   dirFile{dir: fpath, file: fname},
 	}
diff --git a/resources/resource/dates.go b/resources/resource/dates.go
new file mode 100644
index 00000000000..f26c44787b4
--- /dev/null
+++ b/resources/resource/dates.go
@@ -0,0 +1,81 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import "time"
+
+// Compile-time check that Dates implements Dated.
+var _ Dated = Dates{}
+
+// Dated wraps a "dated resource". These are the 4 dates that makes
+// the date logic in Hugo.
+type Dated interface {
+	Date() time.Time
+	Lastmod() time.Time
+	PublishDate() time.Time
+	ExpiryDate() time.Time
+}
+
+// Dates holds the 4 Hugo dates. The fields carry an "F" prefix so the
+// names do not collide with the Dated accessor methods defined on Dates.
+type Dates struct {
+	FDate        time.Time
+	FLastmod     time.Time
+	FPublishDate time.Time
+	FExpiryDate  time.Time
+}
+
+// UpdateDateAndLastmodIfAfter raises d's date and lastmod to in's
+// corresponding values when those are later; publish/expiry are untouched.
+func (d *Dates) UpdateDateAndLastmodIfAfter(in Dated) {
+	if in.Date().After(d.Date()) {
+		d.FDate = in.Date()
+	}
+	if in.Lastmod().After(d.Lastmod()) {
+		d.FLastmod = in.Lastmod()
+	}
+}
+
+// IsFuture returns whether the argument represents the future, i.e. it
+// has a non-zero publish date that is after the current time.
+func IsFuture(d Dated) bool {
+	pub := d.PublishDate()
+	return !pub.IsZero() && pub.After(time.Now())
+}
+
+// IsExpired returns whether the argument is expired, i.e. it has a
+// non-zero expiry date that lies before the current time.
+func IsExpired(d Dated) bool {
+	exp := d.ExpiryDate()
+	return !exp.IsZero() && exp.Before(time.Now())
+}
+
+// IsZeroDates returns true if all of the dates are zero.
+func IsZeroDates(d Dated) bool {
+	return d.Date().IsZero() && d.Lastmod().IsZero() && d.ExpiryDate().IsZero() && d.PublishDate().IsZero()
+}
+
+// Date returns the date. Receiver renamed to d for consistency with the
+// other Dates methods (one receiver name per type).
+func (d Dates) Date() time.Time {
+	return d.FDate
+}
+
+// Lastmod returns the last-modified date.
+func (d Dates) Lastmod() time.Time {
+	return d.FLastmod
+}
+
+// PublishDate returns the publish date.
+func (d Dates) PublishDate() time.Time {
+	return d.FPublishDate
+}
+
+// ExpiryDate returns the expiry date.
+func (d Dates) ExpiryDate() time.Time {
+	return d.FExpiryDate
+}
diff --git a/resources/resource/params.go b/resources/resource/params.go
new file mode 100644
index 00000000000..f6ecea35ad1
--- /dev/null
+++ b/resources/resource/params.go
@@ -0,0 +1,89 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+	"strings"
+
+	"github.com/spf13/cast"
+)
+
+// Param resolves key (optionally a dotted path such as "a.b.c") against
+// r's params, then against the fallback map. It returns nil with a nil
+// error when the key is not found in either.
+func Param(r ResourceParamsProvider, fallback map[string]interface{}, key interface{}) (interface{}, error) {
+	keyStr, err := cast.ToStringE(key)
+	if err != nil {
+		return nil, err
+	}
+
+	// Params are stored lowercased; normalize the lookup key.
+	keyStr = strings.ToLower(keyStr)
+	result, _ := traverseDirectParams(r, fallback, keyStr)
+	if result != nil {
+		return result, nil
+	}
+
+	keySegments := strings.Split(keyStr, ".")
+	if len(keySegments) == 1 {
+		// Not a dotted path; nothing more to try.
+		return nil, nil
+	}
+
+	return traverseNestedParams(r, fallback, keySegments)
+}
+
+// traverseDirectParams looks key up as a single (non-nested) param in r,
+// then in fallback. The error result is always nil; the signature mirrors
+// the other traversal helpers.
+func traverseDirectParams(r ResourceParamsProvider, fallback map[string]interface{}, key string) (interface{}, error) {
+	keyStr := strings.ToLower(key)
+	if val, ok := r.Params()[keyStr]; ok {
+		return val, nil
+	}
+
+	if fallback == nil {
+		return nil, nil
+	}
+
+	return fallback[keyStr], nil
+}
+
+// traverseNestedParams resolves a dotted key path, first against r's
+// params, then against the fallback map. Not finding the key is not an
+// error: the result is nil, nil.
+func traverseNestedParams(r ResourceParamsProvider, fallback map[string]interface{}, keySegments []string) (interface{}, error) {
+	result := traverseParams(keySegments, r.Params())
+	if result != nil {
+		return result, nil
+	}
+
+	if fallback != nil {
+		result = traverseParams(keySegments, fallback)
+		if result != nil {
+			return result, nil
+		}
+	}
+
+	// Didn't find anything, but also no problems.
+	return nil, nil
+}
+
+// traverseParams walks the key path down through nested maps, returning
+// the value found at the final key, or nil as soon as a step is missing.
+func traverseParams(keys []string, m map[string]interface{}) interface{} {
+	current := m
+	for i, key := range keys {
+		v := current[key]
+
+		// Missing step, or this was the last key: we are done either way.
+		if v == nil || i == len(keys)-1 {
+			return v
+		}
+
+		// Descend into the next level.
+		current = cast.ToStringMap(v)
+	}
+	return nil
+}
diff --git a/resources/resource/resource_helpers.go b/resources/resource/resource_helpers.go
new file mode 100644
index 00000000000..b0830a83c87
--- /dev/null
+++ b/resources/resource/resource_helpers.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+	"strings"
+	"time"
+
+	"github.com/gohugoio/hugo/helpers"
+
+	"github.com/spf13/cast"
+)
+
+// GetParam will return the param with the given key from the Resource,
+// nil if not found.
+func GetParam(r Resource, key string) interface{} {
+	return getParam(r, key, false)
+}
+
+// GetParamToLower is the same as GetParam but it will lower case any string
+// result, including string slices.
+func GetParamToLower(r Resource, key string) interface{} {
+	return getParam(r, key, true)
+}
+
+func getParam(r Resource, key string, stringToLower bool) interface{} {
+	v := r.Params()[strings.ToLower(key)]
+
+	if v == nil {
+		return nil
+	}
+
+	switch val := v.(type) {
+	case bool:
+		return val
+	case string:
+		if stringToLower {
+			return strings.ToLower(val)
+		}
+		return val
+	case int64, int32, int16, int8, int:
+		return cast.ToInt(v)
+	case float64, float32:
+		return cast.ToFloat64(v)
+	case time.Time:
+		return val
+	case []string:
+		if stringToLower {
+			return helpers.SliceToLower(val)
+		}
+		return v
+	case map[string]interface{}: // JSON and TOML
+		return v
+	case map[interface{}]interface{}: // YAML
+		return v
+	}
+
+	return nil
+}
diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go
index 120d753e4f7..5a58397355b 100644
--- a/resources/resource/resourcetypes.go
+++ b/resources/resource/resourcetypes.go
@@ -14,6 +14,7 @@
 package resource
 
 import (
+	"github.com/gohugoio/hugo/langs"
 	"github.com/gohugoio/hugo/media"
 
 	"github.com/gohugoio/hugo/common/hugio"
@@ -27,19 +28,32 @@ type Cloner interface {
 
 // Resource represents a linkable resource, i.e. a content page, image etc.
 type Resource interface {
-	resourceBase
-
-	// Permalink represents the absolute link to this resource.
-	Permalink() string
+	ResourceTypesProvider
+	ResourceLinksProvider
+	ResourceMetaProvider
+	ResourceParamsProvider
+	ResourceDataProvider
+}
 
-	// RelPermalink represents the host relative link to this resource.
-	RelPermalink() string
+type ResourceTypesProvider interface {
+	// MediaType is this resource's MIME type.
+	MediaType() media.Type
 
 	// ResourceType is the resource type. For most file types, this is the main
 	// part of the MIME type, e.g. "image", "application", "text" etc.
 	// For content pages, this value is "page".
 	ResourceType() string
+}
 
+type ResourceLinksProvider interface {
+	// Permalink represents the absolute link to this resource.
+	Permalink() string
+
+	// RelPermalink represents the host relative link to this resource.
+	RelPermalink() string
+}
+
+type ResourceMetaProvider interface {
 	// Name is the logical name of this resource. This can be set in the front matter
 	// metadata for this resource. If not set, Hugo will assign a value.
 	// This will in most cases be the base filename.
@@ -50,20 +64,17 @@ type Resource interface {
 
 	// Title returns the title if set in front matter. For content pages, this will be the expected value.
 	Title() string
+}
 
-	// Resource specific data set by Hugo.
-	// One example would be.Data.Digest for fingerprinted resources.
-	Data() interface{}
-
+type ResourceParamsProvider interface {
 	// Params set in front matter for this resource.
 	Params() map[string]interface{}
 }
 
-// resourceBase pulls out the minimal set of operations to define a Resource,
-// to simplify testing etc.
-type resourceBase interface {
-	// MediaType is this resource's MIME type.
-	MediaType() media.Type
+type ResourceDataProvider interface {
+	// Resource specific data set by Hugo.
+	// One example would be .Data.Digest for fingerprinted resources.
+	Data() interface{}
 }
 
 // ResourcesLanguageMerger describes an interface for merging resources from a
@@ -81,11 +92,15 @@ type Identifier interface {
 
 // ContentResource represents a Resource that provides a way to get to its content.
 // Most Resource types in Hugo implements this interface, including Page.
-// This should be used with care, as it will read the file content into memory, but it
-// should be cached as effectively as possible by the implementation.
 type ContentResource interface {
-	resourceBase
+	MediaType() media.Type
+	ContentProvider
+}
 
+// ContentProvider provides Content.
+// This should be used with care, as it will read the file content into memory, but it
+// should be cached as effectively as possible by the implementation.
+type ContentProvider interface {
 	// Content returns this resource's content. It will be equivalent to reading the content
 	// that RelPermalink points to in the published folder.
 	// The return type will be contextual, and should be what you would expect:
@@ -101,6 +116,51 @@ type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error)
 
 // ReadSeekCloserResource is a Resource that supports loading its content.
 type ReadSeekCloserResource interface {
-	resourceBase
+	MediaType() media.Type
 	ReadSeekCloser() (hugio.ReadSeekCloser, error)
 }
+
+// LengthProvider is a Resource that provides a length
+// (typically the length of the content).
+type LengthProvider interface {
+	Len() int
+}
+
+// LanguageProvider is a Resource in a language.
+type LanguageProvider interface {
+	Language() *langs.Language
+}
+
+// TranslationKeyProvider connects translations of the same Resource.
+type TranslationKeyProvider interface {
+	TranslationKey() string
+}
+
+type resourceTypesHolder struct {
+	mediaType    media.Type
+	resourceType string
+}
+
+func (r resourceTypesHolder) MediaType() media.Type {
+	return r.mediaType
+}
+
+func (r resourceTypesHolder) ResourceType() string {
+	return r.resourceType
+}
+
+func NewResourceTypesProvider(mediaType media.Type, resourceType string) ResourceTypesProvider {
+	return resourceTypesHolder{mediaType: mediaType, resourceType: resourceType}
+}
+
+type languageHolder struct {
+	lang *langs.Language
+}
+
+func (l languageHolder) Language() *langs.Language {
+	return l.lang
+}
+
+func NewLanguageProvider(lang *langs.Language) LanguageProvider {
+	return languageHolder{lang: lang}
+}
diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go
index 0830dfc594b..e019133d79f 100644
--- a/resources/resource_metadata.go
+++ b/resources/resource_metadata.go
@@ -47,7 +47,6 @@ const counterPlaceHolder = ":counter"
 // The `name` and `title` metadata field support shell-matched collection it got a match in.
 // See https://golang.org/pkg/path/#Match
 func AssignMetadata(metadata []map[string]interface{}, resources ...resource.Resource) error {
-
 	counters := make(map[string]int)
 
 	for _, r := range resources {
diff --git a/resources/resource_metadata_test.go b/resources/resource_metadata_test.go
index a1a2a738c55..1dd452ebf50 100644
--- a/resources/resource_metadata_test.go
+++ b/resources/resource_metadata_test.go
@@ -90,8 +90,8 @@ func TestAssignMetadata(t *testing.T) {
 			_, p1_2 := foo2.Params()["param1"]
 			_, p2_2 := logo2.Params()["param2"]
 
-			icon1, _ := logo2.Params()["icon"]
-			icon2, _ := foo2.Params()["icon"]
+			icon1 := logo2.Params()["icon"]
+			icon2 := foo2.Params()["icon"]
 
 			assert.True(p1)
 			assert.True(p2)
diff --git a/resources/resource_test.go b/resources/resource_test.go
index be2706e45b0..af7867eb1c9 100644
--- a/resources/resource_test.go
+++ b/resources/resource_test.go
@@ -16,7 +16,6 @@ package resources
 import (
 	"fmt"
 	"math/rand"
-	"path"
 	"path/filepath"
 	"strings"
 	"testing"
@@ -45,9 +44,8 @@ func TestGenericResourceWithLinkFacory(t *testing.T) {
 	assert := require.New(t)
 	spec := newTestResourceSpec(assert)
 
-	factory := func(s string) string {
-		return path.Join("/foo", s)
-	}
+	factory := newTargetPaths("/foo")
+
 	r := spec.newGenericResource(nil, factory, nil, "/a/foo.css", "foo.css", media.CSSType)
 
 	assert.Equal("https://example.com/foo/foo.css", r.Permalink())
diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go
index d0fcb59e7d3..200a795e330 100644
--- a/resources/testhelpers_test.go
+++ b/resources/testhelpers_test.go
@@ -9,7 +9,6 @@ import (
 	"io"
 	"io/ioutil"
 	"os"
-	"path"
 	"runtime"
 	"strings"
 
@@ -18,6 +17,7 @@ import (
 	"github.com/gohugoio/hugo/hugofs"
 	"github.com/gohugoio/hugo/media"
 	"github.com/gohugoio/hugo/output"
+	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/afero"
 	"github.com/spf13/viper"
@@ -61,11 +61,20 @@ func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *
 	return spec
 }
 
+func newTargetPaths(link string) func() page.TargetPaths {
+	return func() page.TargetPaths {
+		return page.TargetPaths{
+			SubResourceBaseTarget: filepath.FromSlash(link),
+			SubResourceBaseLink:   link,
+		}
+	}
+}
+
 func newTestResourceOsFs(assert *require.Assertions) *Spec {
 	cfg := viper.New()
 	cfg.Set("baseURL", "https://example.com")
 
-	workDir, err := ioutil.TempDir("", "hugores")
+	workDir, _ := ioutil.TempDir("", "hugores")
 
 	if runtime.GOOS == "darwin" && !strings.HasPrefix(workDir, "/private") {
 		// To get the entry folder in line with the rest. This its a little bit
@@ -124,11 +133,9 @@ func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) r
 	src.Close()
 	assert.NoError(err)
 
-	factory := func(s string) string {
-		return path.Join("/a", s)
-	}
+	factory := newTargetPaths("/a")
 
-	r, err := spec.New(ResourceSourceDescriptor{TargetPathBuilder: factory, SourceFilename: name})
+	r, err := spec.New(ResourceSourceDescriptor{TargetPaths: factory, SourceFilename: name})
 	assert.NoError(err)
 
 	return r.(resource.ContentResource)
diff --git a/resources/transform.go b/resources/transform.go
index fd3ae1ae673..934c713277b 100644
--- a/resources/transform.go
+++ b/resources/transform.go
@@ -320,7 +320,7 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 			key = key + "_" + v.transformation.Key().key()
 		case permalinker:
 			r.linker = v
-			p := v.targetPath()
+			p := v.TargetPath()
 			if p == "" {
 				panic("target path needed for key creation")
 			}
@@ -375,7 +375,7 @@ func (r *transformedResource) transform(setContent, publish bool) (err error) {
 	tctx.To = b1
 
 	if r.linker != nil {
-		tctx.InPath = r.linker.targetPath()
+		tctx.InPath = r.linker.TargetPath()
 		tctx.SourcePath = tctx.InPath
 	}
 
diff --git a/source/fileInfo.go b/source/fileInfo.go
index ad302f4703c..752f104e869 100644
--- a/source/fileInfo.go
+++ b/source/fileInfo.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -21,6 +21,8 @@ import (
 	"strings"
 	"sync"
 
+	"github.com/gohugoio/hugo/common/hugio"
+
 	"github.com/spf13/afero"
 
 	"github.com/gohugoio/hugo/hugofs"
@@ -35,34 +37,46 @@ var (
 )
 
 // File represents a source file.
+// This is a temporary construct until we resolve page.Page conflicts.
+// TODO(bep) remove this construct once we have resolved page deprecations
 type File interface {
+	fileOverlap
+	FileWithoutOverlap
+}
 
-	// Filename gets the full path and filename to the file.
-	Filename() string
-
+// Temporary to solve duplicate/deprecated names in page.Page
+type fileOverlap interface {
 	// Path gets the relative path including file name and extension.
 	// The directory is relative to the content root.
 	Path() string
 
+	// Section is first directory below the content root.
+	// For page bundles in root, the Section will be empty.
+	Section() string
+
+	// Lang is the language code for this page. It will be the
+	// same as the site's language code.
+	Lang() string
+}
+
+type FileWithoutOverlap interface {
+
+	// Filename gets the full path and filename to the file.
+	Filename() string
+
 	// Dir gets the name of the directory that contains this file.
 	// The directory is relative to the content root.
 	Dir() string
 
 	// Extension gets the file extension, i.e "myblogpost.md" will return "md".
 	Extension() string
+
 	// Ext is an alias for Extension.
 	Ext() string // Hmm... Deprecate Extension
 
-	// Lang for this page, if `Multilingual` is enabled on your site.
-	Lang() string
-
 	// LogicalName is filename and extension of the file.
 	LogicalName() string
 
-	// Section is first directory below the content root.
-	// For page bundles in root, the Section will be empty.
-	Section() string
-
 	// BaseFileName is a filename without extension.
 	BaseFileName() string
 
@@ -79,14 +93,12 @@ type File interface {
 	UniqueID() string
 
 	FileInfo() os.FileInfo
-
-	String() string
 }
 
 // A ReadableFile is a File that is readable.
 type ReadableFile interface {
 	File
-	Open() (io.ReadCloser, error)
+	Open() (hugio.ReadSeekCloser, error)
 }
 
 // FileInfo describes a source file.
@@ -174,7 +186,7 @@ func (fi *FileInfo) FileInfo() os.FileInfo { return fi.fi }
 func (fi *FileInfo) String() string { return fi.BaseFileName() }
 
 // Open implements ReadableFile.
-func (fi *FileInfo) Open() (io.ReadCloser, error) {
+func (fi *FileInfo) Open() (hugio.ReadSeekCloser, error) {
 	f, err := fi.sp.SourceFs.Open(fi.Filename())
 	return f, err
 }
@@ -201,6 +213,16 @@ func (fi *FileInfo) init() {
 	})
 }
 
+// NewTestFile creates a partially filled File used in unit tests.
+// TODO(bep) improve this package
+func NewTestFile(filename string) *FileInfo {
+	base := filepath.Base(filepath.Dir(filename))
+	return &FileInfo{
+		filename:            filename,
+		translationBaseName: base,
+	}
+}
+
 // NewFileInfo returns a new FileInfo structure.
 func (sp *SourceSpec) NewFileInfo(baseDir, filename string, isLeafBundle bool, fi os.FileInfo) *FileInfo {
 
diff --git a/tpl/collections/apply_test.go b/tpl/collections/apply_test.go
index 0878844b2cd..edec3da18b0 100644
--- a/tpl/collections/apply_test.go
+++ b/tpl/collections/apply_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -29,6 +29,10 @@ func (templateFinder) Lookup(name string) (tpl.Template, bool) {
 	return nil, false
 }
 
+func (templateFinder) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+	return nil, false, false
+}
+
 func (templateFinder) GetFuncs() map[string]interface{} {
 	return map[string]interface{}{
 		"print": fmt.Sprint,
diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go
index bad65369fab..92a61e575c6 100644
--- a/tpl/collections/collections.go
+++ b/tpl/collections/collections.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -329,13 +329,17 @@ func (ns *Namespace) Group(key interface{}, items interface{}) (interface{}, err
 		return nil, errors.New("nil is not a valid key to group by")
 	}
 
+	if g, ok := items.(collections.Grouper); ok {
+		return g.Group(key, items)
+	}
+
 	in := newSliceElement(items)
 
 	if g, ok := in.(collections.Grouper); ok {
 		return g.Group(key, items)
 	}
 
-	return nil, fmt.Errorf("grouping not supported for type %T", items)
+	return nil, fmt.Errorf("grouping not supported for type %T %T", items, in)
 }
 
 // IsSet returns whether a given array, channel, slice, or map has a key
diff --git a/tpl/collections/collections_test.go b/tpl/collections/collections_test.go
index 0edb8299f3a..103aee59e67 100644
--- a/tpl/collections/collections_test.go
+++ b/tpl/collections/collections_test.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -311,16 +311,16 @@ func TestIn(t *testing.T) {
 	}
 }
 
-type page struct {
+type testPage struct {
 	Title string
 }
 
-func (p page) String() string {
+func (p testPage) String() string {
 	return "p-" + p.Title
 }
 
-type pagesPtr []*page
-type pagesVals []page
+type pagesPtr []*testPage
+type pagesVals []testPage
 
 func TestIntersect(t *testing.T) {
 	t.Parallel()
@@ -328,15 +328,15 @@ func TestIntersect(t *testing.T) {
 	ns := New(&deps.Deps{})
 
 	var (
-		p1 = &page{"A"}
-		p2 = &page{"B"}
-		p3 = &page{"C"}
-		p4 = &page{"D"}
-
-		p1v = page{"A"}
-		p2v = page{"B"}
-		p3v = page{"C"}
-		p4v = page{"D"}
+		p1 = &testPage{"A"}
+		p2 = &testPage{"B"}
+		p3 = &testPage{"C"}
+		p4 = &testPage{"D"}
+
+		p1v = testPage{"A"}
+		p2v = testPage{"B"}
+		p3v = testPage{"C"}
+		p4v = testPage{"D"}
 	)
 
 	for i, test := range []struct {
@@ -672,14 +672,14 @@ func TestUnion(t *testing.T) {
 	ns := New(&deps.Deps{})
 
 	var (
-		p1 = &page{"A"}
-		p2 = &page{"B"}
+		p1 = &testPage{"A"}
+		p2 = &testPage{"B"}
 		//		p3 = &page{"C"}
-		p4 = &page{"D"}
+		p4 = &testPage{"D"}
 
-		p1v = page{"A"}
+		p1v = testPage{"A"}
 		//p2v = page{"B"}
-		p3v = page{"C"}
+		p3v = testPage{"C"}
 		//p4v = page{"D"}
 	)
 
diff --git a/tpl/template.go b/tpl/template.go
index 3225814c02d..07152166a2c 100644
--- a/tpl/template.go
+++ b/tpl/template.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -21,6 +21,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/gohugoio/hugo/output"
+
 	"github.com/gohugoio/hugo/common/herrors"
 
 	"github.com/gohugoio/hugo/hugofs"
@@ -37,7 +39,8 @@ import (
 )
 
 var (
-	_ TemplateExecutor = (*TemplateAdapter)(nil)
+	_ TemplateExecutor     = (*TemplateAdapter)(nil)
+	_ TemplateInfoProvider = (*TemplateAdapter)(nil)
 )
 
 // TemplateHandler manages the collection of templates.
@@ -53,17 +56,47 @@ type TemplateHandler interface {
 	RebuildClone()
 }
 
+// TemplateVariants describes the possible variants of a template.
+// All of these may be empty.
+type TemplateVariants struct {
+	Language     string
+	OutputFormat output.Format
+}
+
 // TemplateFinder finds templates.
 type TemplateFinder interface {
+	TemplateLookup
+	TemplateLookupVariant
+}
+
+type TemplateLookup interface {
 	Lookup(name string) (Template, bool)
 }
 
+type TemplateLookupVariant interface {
+	// TODO(bep) this currently only works for shortcodes.
+	// We may unify and expand this variant pattern to the
+	// other templates, but we need this now for the shortcodes to
+	// quickly determine if a shortcode has a template for a given
+	// output format.
+	// It returns the template, whether it was found, and whether there are
+	// alternative representations (output format, language).
+	// We are currently only interested in output formats, so we should improve
+	// this for speed.
+	LookupVariant(name string, variants TemplateVariants) (Template, bool, bool)
+}
+
 // Template is the common interface between text/template and html/template.
 type Template interface {
 	Execute(wr io.Writer, data interface{}) error
 	Name() string
 }
 
+// TemplateInfoProvider provides some contextual information about a template.
+type TemplateInfoProvider interface {
+	TemplateInfo() Info
+}
+
 // TemplateParser is used to parse ad-hoc templates, e.g. in the Resource chain.
 type TemplateParser interface {
 	Parse(name, tpl string) (Template, error)
@@ -92,6 +125,8 @@ type TemplateAdapter struct {
 	Template
 	Metrics metrics.Provider
 
+	Info Info
+
 	// The filesystem where the templates are stored.
 	Fs afero.Fs
 
@@ -133,6 +168,10 @@ func (t *TemplateAdapter) Execute(w io.Writer, data interface{}) (execErr error)
 	return
 }
 
+func (t *TemplateAdapter) TemplateInfo() Info {
+	return t.Info
+}
+
 // The identifiers may be truncated in the log, e.g.
 // "executing "main" at <$scaled.SRelPermalin...>: can't evaluate field SRelPermalink in type *resource.Image"
 var identifiersRe = regexp.MustCompile("at \\<(.*?)(\\.{3})?\\>:")
diff --git a/tpl/template_info.go b/tpl/template_info.go
new file mode 100644
index 00000000000..8568f46f0ff
--- /dev/null
+++ b/tpl/template_info.go
@@ -0,0 +1,35 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+// Increments on breaking changes.
+const TemplateVersion = 2
+
+// Info holds some info extracted from a parsed template.
+type Info struct {
+
+	// Set for shortcode templates with any {{ .Inner }}
+	IsInner bool
+
+	// Config extracted from template.
+	Config Config
+}
+
+type Config struct {
+	Version int
+}
+
+var DefaultConfig = Config{
+	Version: TemplateVersion,
+}
diff --git a/tpl/tplimpl/ace.go b/tpl/tplimpl/ace.go
index 6fb4ca4398d..7a1f849f40a 100644
--- a/tpl/tplimpl/ace.go
+++ b/tpl/tplimpl/ace.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,7 +14,6 @@
 package tplimpl
 
 import (
-	"html/template"
 	"path/filepath"
 
 	"strings"
@@ -52,15 +51,15 @@ func (t *templateHandler) addAceTemplate(name, basePath, innerPath string, baseC
 		return err
 	}
 
-	if err := applyTemplateTransformersToHMLTTemplate(templ); err != nil {
+	isShort := isShortcode(name)
+
+	info, err := applyTemplateTransformersToHMLTTemplate(isShort, templ)
+	if err != nil {
 		return err
 	}
 
-	if strings.Contains(name, "shortcodes") {
-		// We need to keep track of one ot the output format's shortcode template
-		// without knowing the rendering context.
-		clone := template.Must(templ.Clone())
-		t.html.t.AddParseTree(withoutExt, clone.Tree)
+	if isShort {
+		t.addShortcodeVariant(name, info, templ)
 	}
 
 	return nil
diff --git a/tpl/tplimpl/embedded/generate/generate.go b/tpl/tplimpl/embedded/generate/generate.go
index 76a167a99a7..a48e00756ac 100644
--- a/tpl/tplimpl/embedded/generate/generate.go
+++ b/tpl/tplimpl/embedded/generate/generate.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -63,7 +63,7 @@ func main() {
 		log.Fatal(err)
 	}
 
-	fmt.Fprint(file, `// Copyright 2018 The Hugo Authors. All rights reserved.
+	fmt.Fprint(file, `// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/tpl/tplimpl/embedded/templates.autogen.go b/tpl/tplimpl/embedded/templates.autogen.go
index ed9ba35ac30..d55e5b307a9 100644
--- a/tpl/tplimpl/embedded/templates.autogen.go
+++ b/tpl/tplimpl/embedded/templates.autogen.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,7 +19,13 @@ package embedded
 // EmbeddedTemplates represents all embedded templates.
 var EmbeddedTemplates = [][2]string{
 	{`_default/robots.txt`, `User-agent: *`},
-	{`_default/rss.xml`, `<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
+	{`_default/rss.xml`, `{{- $pages := .Data.Pages -}}
+{{- $limit := .Site.Config.Services.RSS.Limit -}}
+{{- if ge $limit 1 -}}
+{{- $pages = $pages | first $limit -}}
+{{- end -}}
+{{- printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
+<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
   <channel>
     <title>{{ if eq  .Title  .Site.Title }}{{ .Site.Title }}{{ else }}{{ with .Title }}{{.}} on {{ end }}{{ .Site.Title }}{{ end }}</title>
     <link>{{ .Permalink }}</link>
@@ -33,7 +39,7 @@ var EmbeddedTemplates = [][2]string{
     {{ with .OutputFormats.Get "RSS" }}
 	{{ printf "<atom:link href=%q rel=\"self\" type=%q />" .Permalink .MediaType | safeHTML }}
     {{ end }}
-    {{ range .Data.Pages }}
+    {{ range $pages }}
     <item>
       <title>{{ .Title }}</title>
       <link>{{ .Permalink }}</link>
@@ -45,7 +51,8 @@ var EmbeddedTemplates = [][2]string{
     {{ end }}
   </channel>
 </rss>`},
-	{`_default/sitemap.xml`, `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
+	{`_default/sitemap.xml`, `{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
   xmlns:xhtml="http://www.w3.org/1999/xhtml">
   {{ range .Data.Pages }}
   <url>
@@ -55,18 +62,19 @@ var EmbeddedTemplates = [][2]string{
     <priority>{{ .Sitemap.Priority }}</priority>{{ end }}{{ if .IsTranslated }}{{ range .Translations }}
     <xhtml:link
                 rel="alternate"
-                hreflang="{{ .Lang }}"
+                hreflang="{{ .Language.Lang }}"
                 href="{{ .Permalink }}"
                 />{{ end }}
     <xhtml:link
                 rel="alternate"
-                hreflang="{{ .Lang }}"
+                hreflang="{{ .Language.Lang }}"
                 href="{{ .Permalink }}"
                 />{{ end }}
   </url>
   {{ end }}
 </urlset>`},
-	{`_default/sitemapindex.xml`, `<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+	{`_default/sitemapindex.xml`, `{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
+<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
 	{{ range . }}
 	<sitemap>
 	   	<loc>{{ .SitemapAbsURL }}</loc>
@@ -77,7 +85,7 @@ var EmbeddedTemplates = [][2]string{
 	{{ end }}
 </sitemapindex>
 `},
-	{`disqus.html`, `{{- $pc := .Page.Site.Config.Privacy.Disqus -}}
+	{`disqus.html`, `{{- $pc := .Site.Config.Privacy.Disqus -}}
 {{- if not $pc.Disable -}}
 {{ if .Site.DisqusShortname }}<div id="disqus_thread"></div>
 <script type="application/javascript">
diff --git a/tpl/tplimpl/embedded/templates/_default/rss.xml b/tpl/tplimpl/embedded/templates/_default/rss.xml
index abba0b28add..675ecd43c3a 100644
--- a/tpl/tplimpl/embedded/templates/_default/rss.xml
+++ b/tpl/tplimpl/embedded/templates/_default/rss.xml
@@ -1,3 +1,9 @@
+{{- $pages := .Data.Pages -}}
+{{- $limit := .Site.Config.Services.RSS.Limit -}}
+{{- if ge $limit 1 -}}
+{{- $pages = $pages | first $limit -}}
+{{- end -}}
+{{- printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
 <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
   <channel>
     <title>{{ if eq  .Title  .Site.Title }}{{ .Site.Title }}{{ else }}{{ with .Title }}{{.}} on {{ end }}{{ .Site.Title }}{{ end }}</title>
@@ -12,7 +18,7 @@
     {{ with .OutputFormats.Get "RSS" }}
 	{{ printf "<atom:link href=%q rel=\"self\" type=%q />" .Permalink .MediaType | safeHTML }}
     {{ end }}
-    {{ range .Data.Pages }}
+    {{ range $pages }}
     <item>
       <title>{{ .Title }}</title>
       <link>{{ .Permalink }}</link>
diff --git a/tpl/tplimpl/embedded/templates/_default/sitemap.xml b/tpl/tplimpl/embedded/templates/_default/sitemap.xml
index e0a2b189d00..f5b44c410e7 100644
--- a/tpl/tplimpl/embedded/templates/_default/sitemap.xml
+++ b/tpl/tplimpl/embedded/templates/_default/sitemap.xml
@@ -1,3 +1,4 @@
+{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
 <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
   xmlns:xhtml="http://www.w3.org/1999/xhtml">
   {{ range .Data.Pages }}
@@ -8,12 +9,12 @@
     <priority>{{ .Sitemap.Priority }}</priority>{{ end }}{{ if .IsTranslated }}{{ range .Translations }}
     <xhtml:link
                 rel="alternate"
-                hreflang="{{ .Lang }}"
+                hreflang="{{ .Language.Lang }}"
                 href="{{ .Permalink }}"
                 />{{ end }}
     <xhtml:link
                 rel="alternate"
-                hreflang="{{ .Lang }}"
+                hreflang="{{ .Language.Lang }}"
                 href="{{ .Permalink }}"
                 />{{ end }}
   </url>
diff --git a/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml b/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml
index 4cd289fe9f3..60724c7b81a 100644
--- a/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml
+++ b/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml
@@ -1,3 +1,4 @@
+{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>" | safeHTML }}
 <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
 	{{ range . }}
 	<sitemap>
diff --git a/tpl/tplimpl/embedded/templates/disqus.html b/tpl/tplimpl/embedded/templates/disqus.html
index 178d84caf8a..ab51bb5c0b7 100644
--- a/tpl/tplimpl/embedded/templates/disqus.html
+++ b/tpl/tplimpl/embedded/templates/disqus.html
@@ -1,4 +1,4 @@
-{{- $pc := .Page.Site.Config.Privacy.Disqus -}}
+{{- $pc := .Site.Config.Privacy.Disqus -}}
 {{- if not $pc.Disable -}}
 {{ if .Site.DisqusShortname }}<div id="disqus_thread"></div>
 <script type="application/javascript">
diff --git a/tpl/tplimpl/shortcodes.go b/tpl/tplimpl/shortcodes.go
new file mode 100644
index 00000000000..8577fbeedc9
--- /dev/null
+++ b/tpl/tplimpl/shortcodes.go
@@ -0,0 +1,148 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+	"strings"
+
+	"github.com/gohugoio/hugo/tpl"
+)
+
+// Currently lang, outFormat, suffix
+const numTemplateVariants = 3
+
+type shortcodeVariant struct {
+
+	// The possible variants: lang, outFormat, suffix
+	// gtag
+	// gtag.html
+	// gtag.no.html
+	// gtag.no.amp.html
+	// A slice of length numTemplateVariants.
+	variants []string
+
+	info  tpl.Info
+	templ tpl.Template
+}
+
+type shortcodeTemplates struct {
+	variants []shortcodeVariant
+}
+
+func (s *shortcodeTemplates) indexOf(variants []string) int {
+L:
+	for i, v1 := range s.variants {
+		for i, v2 := range v1.variants {
+			if v2 != variants[i] {
+				continue L
+			}
+		}
+		return i
+	}
+	return -1
+}
+
+func (s *shortcodeTemplates) fromVariants(variants tpl.TemplateVariants) (shortcodeVariant, bool) {
+	return s.fromVariantsSlice([]string{
+		variants.Language,
+		strings.ToLower(variants.OutputFormat.Name),
+		variants.OutputFormat.MediaType.Suffix(),
+	})
+}
+
+// Get the most specific template given a full name, e.g. gtag.no.amp.html.
+func (s *shortcodeTemplates) fromName(name string) (shortcodeVariant, bool) {
+	return s.fromVariantsSlice(templateVariants(name))
+}
+
+func (s *shortcodeTemplates) fromVariantsSlice(variants []string) (shortcodeVariant, bool) {
+	var (
+		bestMatch       shortcodeVariant
+		bestMatchWeight int
+	)
+
+	for _, variant := range s.variants {
+		w := s.compareVariants(variants, variant.variants)
+		if bestMatchWeight == 0 || w > bestMatchWeight {
+			bestMatch = variant
+			bestMatchWeight = w
+		}
+	}
+
+	return bestMatch, true
+}
+
+// calculate a weight for two string slices of the same length.
+// higher value means "better match".
+func (s *shortcodeTemplates) compareVariants(a, b []string) int {
+
+	weight := 0
+	for i, av := range a {
+		bv := b[i]
+		if av == bv {
+			weight++
+		} else {
+			weight--
+		}
+	}
+	return weight
+}
+
+func templateVariants(name string) []string {
+	_, variants := templateNameAndVariants(name)
+	return variants
+}
+
+func templateNameAndVariants(name string) (string, []string) {
+
+	variants := make([]string, numTemplateVariants)
+
+	parts := strings.Split(name, ".")
+
+	if len(parts) <= 1 {
+		// No variants.
+		return name, variants
+	}
+
+	name = parts[0]
+	parts = parts[1:]
+	lp := len(parts)
+	start := len(variants) - lp
+
+	for i, j := start, 0; i < len(variants); i, j = i+1, j+1 {
+		variants[i] = parts[j]
+	}
+
+	if lp > 1 && lp < len(variants) {
+		for i := lp - 1; i > 0; i-- {
+			variants[i-1] = variants[i]
+		}
+	}
+
+	if lp == 1 {
+		// Suffix only. Duplicate it into the output format field to
+		// make HTML win over AMP.
+		variants[len(variants)-2] = variants[len(variants)-1]
+	}
+
+	return name, variants
+}
+
+func isShortcode(name string) bool {
+	return strings.Contains(name, "shortcodes/")
+}
+
+func isInternal(name string) bool {
+	return strings.HasPrefix(name, "_internal/")
+}
diff --git a/tpl/tplimpl/shortcodes_test.go b/tpl/tplimpl/shortcodes_test.go
new file mode 100644
index 00000000000..6909feda72e
--- /dev/null
+++ b/tpl/tplimpl/shortcodes_test.go
@@ -0,0 +1,94 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestShortcodesTemplate(t *testing.T) {
+
+	t.Run("isShortcode", func(t *testing.T) {
+		assert := require.New(t)
+		assert.True(isShortcode("shortcodes/figures.html"))
+		assert.True(isShortcode("_internal/shortcodes/figures.html"))
+		assert.False(isShortcode("shortcodes\\figures.html"))
+		assert.False(isShortcode("myshortcodes"))
+
+	})
+
+	t.Run("variantsFromName", func(t *testing.T) {
+		assert := require.New(t)
+		assert.Equal([]string{"", "html", "html"}, templateVariants("figure.html"))
+		assert.Equal([]string{"no", "no", "html"}, templateVariants("figure.no.html"))
+		assert.Equal([]string{"no", "amp", "html"}, templateVariants("figure.no.amp.html"))
+		assert.Equal([]string{"amp", "amp", "html"}, templateVariants("figure.amp.html"))
+
+		name, variants := templateNameAndVariants("figure.html")
+		assert.Equal("figure", name)
+		assert.Equal([]string{"", "html", "html"}, variants)
+
+	})
+
+	t.Run("compareVariants", func(t *testing.T) {
+		assert := require.New(t)
+		var s *shortcodeTemplates
+
+		tests := []struct {
+			name     string
+			name1    string
+			name2    string
+			expected int
+		}{
+			{"Same suffix", "figure.html", "figure.html", 3},
+			{"Same suffix and output format", "figure.html.html", "figure.html.html", 3},
+			{"Same suffix, output format and language", "figure.no.html.html", "figure.no.html.html", 3},
+			{"No suffix", "figure", "figure", 3},
+			{"Different output format", "figure.amp.html", "figure.html.html", -1},
+			{"One with output format, one without", "figure.amp.html", "figure.html", -1},
+		}
+
+		for i, test := range tests {
+			w := s.compareVariants(templateVariants(test.name1), templateVariants(test.name2))
+			assert.Equal(test.expected, w, fmt.Sprintf("[%d] %s", i, test.name))
+		}
+
+	})
+
+	t.Run("indexOf", func(t *testing.T) {
+		assert := require.New(t)
+
+		s := &shortcodeTemplates{
+			variants: []shortcodeVariant{
+				shortcodeVariant{variants: []string{"a", "b", "c"}},
+				shortcodeVariant{variants: []string{"a", "b", "d"}},
+			},
+		}
+
+		assert.Equal(0, s.indexOf([]string{"a", "b", "c"}))
+		assert.Equal(1, s.indexOf([]string{"a", "b", "d"}))
+		assert.Equal(-1, s.indexOf([]string{"a", "b", "x"}))
+
+	})
+
+	t.Run("Template", func(t *testing.T) {
+		assert := require.New(t)
+
+		assert.True(true)
+
+	})
+}
diff --git a/tpl/tplimpl/template.go b/tpl/tplimpl/template.go
index 26a418108a2..d6deba2dfa2 100644
--- a/tpl/tplimpl/template.go
+++ b/tpl/tplimpl/template.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -86,6 +86,10 @@ type templateFuncsterSetter interface {
 type templateHandler struct {
 	mu sync.Mutex
 
+	// shortcodes maps shortcode name to template variants
+	// (language, output format etc.) of that shortcode.
+	shortcodes map[string]*shortcodeTemplates
+
 	// text holds all the pure text templates.
 	text *textTemplates
 	html *htmlTemplates
@@ -103,6 +107,29 @@ type templateHandler struct {
 	*deps.Deps
 }
 
+func (t *templateHandler) addShortcodeVariant(name string, info tpl.Info, templ tpl.Template) {
+	shortcodename, variants := templateNameAndVariants(path.Base(name))
+
+	templs, found := t.shortcodes[shortcodename]
+	if !found {
+		templs = &shortcodeTemplates{}
+		t.shortcodes[shortcodename] = templs
+	}
+
+	sv := shortcodeVariant{variants: variants, info: info, templ: templ}
+
+	i := templs.indexOf(variants)
+
+	if i != -1 {
+		// Only replace if it's an override of an internal template.
+		if !isInternal(name) {
+			templs.variants[i] = sv
+		}
+	} else {
+		templs.variants = append(templs.variants, sv)
+	}
+}
+
 // NewTextTemplate provides a text template parser that has all the Hugo
 // template funcs etc. built-in.
 func (t *templateHandler) NewTextTemplate() tpl.TemplateParseFinder {
@@ -112,8 +139,22 @@ func (t *templateHandler) NewTextTemplate() tpl.TemplateParseFinder {
 	tt := &textTemplate{t: texttemplate.New("")}
 	t.extTextTemplates = append(t.extTextTemplates, tt)
 
-	return tt
+	return struct {
+		tpl.TemplateParser
+		tpl.TemplateLookup
+		tpl.TemplateLookupVariant
+	}{
+		tt,
+		tt,
+		new(nopLookupVariant),
+	}
+
+}
+
+type nopLookupVariant int
 
+func (l nopLookupVariant) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+	return nil, false, false
 }
 
 func (t *templateHandler) Debug() {
@@ -143,13 +184,85 @@ func (t *templateHandler) Lookup(name string) (tpl.Template, bool) {
 
 }
 
+// This currently only applies to shortcodes and what we get here is the
+// shortcode name.
+func (t *templateHandler) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+	name = path.Base(name)
+	s, found := t.shortcodes[name]
+	if !found {
+		return nil, false, false
+	}
+
+	sv, found := s.fromVariants(variants)
+	if !found {
+		return nil, false, false
+	}
+
+	more := len(s.variants) > 1
+
+	return &tpl.TemplateAdapter{
+		Template:             sv.templ,
+		Info:                 sv.info,
+		Metrics:              t.Deps.Metrics,
+		Fs:                   t.layoutsFs,
+		NameBaseTemplateName: t.html.nameBaseTemplateName}, true, more
+
+}
+
+func (t *textTemplates) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+	return t.handler.LookupVariant(name, variants)
+}
+
+func (t *htmlTemplates) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+	return t.handler.LookupVariant(name, variants)
+}
+
+func (t *templateHandler) cloneTemplate(in interface{}) tpl.Template {
+	switch templ := in.(type) {
+	case *texttemplate.Template:
+		return texttemplate.Must(templ.Clone())
+	case *template.Template:
+		return template.Must(templ.Clone())
+	}
+
+	panic(fmt.Sprintf("%T is not a template", in))
+}
+
+func (t *templateHandler) setFuncMapInTemplate(in interface{}, funcs map[string]interface{}) {
+	switch templ := in.(type) {
+	case *texttemplate.Template:
+		templ.Funcs(funcs)
+		return
+	case *template.Template:
+		templ.Funcs(funcs)
+		return
+	}
+
+	panic(fmt.Sprintf("%T is not a template", in))
+}
+
 func (t *templateHandler) clone(d *deps.Deps) *templateHandler {
 	c := &templateHandler{
-		Deps:      d,
-		layoutsFs: d.BaseFs.Layouts.Fs,
-		html:      &htmlTemplates{t: template.Must(t.html.t.Clone()), overlays: make(map[string]*template.Template), templatesCommon: t.html.templatesCommon},
-		text:      &textTemplates{textTemplate: &textTemplate{t: texttemplate.Must(t.text.t.Clone())}, overlays: make(map[string]*texttemplate.Template), templatesCommon: t.text.templatesCommon},
-		errors:    make([]*templateErr, 0),
+		Deps:       d,
+		layoutsFs:  d.BaseFs.Layouts.Fs,
+		shortcodes: make(map[string]*shortcodeTemplates),
+		html:       &htmlTemplates{t: template.Must(t.html.t.Clone()), overlays: make(map[string]*template.Template), templatesCommon: t.html.templatesCommon},
+		text:       &textTemplates{textTemplate: &textTemplate{t: texttemplate.Must(t.text.t.Clone())}, overlays: make(map[string]*texttemplate.Template), templatesCommon: t.text.templatesCommon},
+		errors:     make([]*templateErr, 0),
+	}
+
+	for k, v := range t.shortcodes {
+		other := *v
+		variantsc := make([]shortcodeVariant, len(v.variants))
+		for i, variant := range v.variants {
+			variantsc[i] = shortcodeVariant{
+				info:     variant.info,
+				variants: variant.variants,
+				templ:    t.cloneTemplate(variant.templ),
+			}
+		}
+		other.variants = variantsc
+		c.shortcodes[k] = &other
 	}
 
 	d.Tmpl = c
@@ -193,11 +306,12 @@ func newTemplateAdapter(deps *deps.Deps) *templateHandler {
 		templatesCommon: common,
 	}
 	h := &templateHandler{
-		Deps:      deps,
-		layoutsFs: deps.BaseFs.Layouts.Fs,
-		html:      htmlT,
-		text:      textT,
-		errors:    make([]*templateErr, 0),
+		Deps:       deps,
+		layoutsFs:  deps.BaseFs.Layouts.Fs,
+		shortcodes: make(map[string]*shortcodeTemplates),
+		html:       htmlT,
+		text:       textT,
+		errors:     make([]*templateErr, 0),
 	}
 
 	common.handler = h
@@ -215,6 +329,8 @@ type templatesCommon struct {
 	nameBaseTemplateName map[string]string
 }
 type htmlTemplates struct {
+	mu sync.RWMutex
+
 	*templatesCommon
 
 	t *template.Template
@@ -245,6 +361,8 @@ func (t *htmlTemplates) Lookup(name string) (tpl.Template, bool) {
 }
 
 func (t *htmlTemplates) lookup(name string) *template.Template {
+	t.mu.RLock()
+	defer t.mu.RUnlock()
 
 	// Need to check in the overlay registry first as it will also be found below.
 	if t.overlays != nil {
@@ -337,21 +455,23 @@ func (t *templateHandler) LoadTemplates(prefix string) error {
 }
 
 func (t *htmlTemplates) addTemplateIn(tt *template.Template, name, tpl string) error {
+	t.mu.Lock()
+	defer t.mu.Unlock()
+
 	templ, err := tt.New(name).Parse(tpl)
 	if err != nil {
 		return err
 	}
 
-	if err := applyTemplateTransformersToHMLTTemplate(templ); err != nil {
+	isShort := isShortcode(name)
+
+	info, err := applyTemplateTransformersToHMLTTemplate(isShort, templ)
+	if err != nil {
 		return err
 	}
 
-	if strings.Contains(name, "shortcodes") {
-		// We need to keep track of one ot the output format's shortcode template
-		// without knowing the rendering context.
-		withoutExt := strings.TrimSuffix(name, path.Ext(name))
-		clone := template.Must(templ.Clone())
-		tt.AddParseTree(withoutExt, clone.Tree)
+	if isShort {
+		t.handler.addShortcodeVariant(name, info, templ)
 	}
 
 	return nil
@@ -371,7 +491,7 @@ type textTemplate struct {
 }
 
 func (t *textTemplate) Parse(name, tpl string) (tpl.Template, error) {
-	return t.parSeIn(t.t, name, tpl)
+	return t.parseIn(t.t, name, tpl)
 }
 
 func (t *textTemplate) Lookup(name string) (tpl.Template, bool) {
@@ -382,7 +502,7 @@ func (t *textTemplate) Lookup(name string) (tpl.Template, bool) {
 	return tpl, tpl != nil
 }
 
-func (t *textTemplate) parSeIn(tt *texttemplate.Template, name, tpl string) (*texttemplate.Template, error) {
+func (t *textTemplate) parseIn(tt *texttemplate.Template, name, tpl string) (*texttemplate.Template, error) {
 	t.mu.Lock()
 	defer t.mu.Unlock()
 
@@ -391,7 +511,7 @@ func (t *textTemplate) parSeIn(tt *texttemplate.Template, name, tpl string) (*te
 		return nil, err
 	}
 
-	if err := applyTemplateTransformersToTextTemplate(templ); err != nil {
+	if _, err := applyTemplateTransformersToTextTemplate(false, templ); err != nil {
 		return nil, err
 	}
 	return templ, nil
@@ -399,21 +519,20 @@ func (t *textTemplate) parSeIn(tt *texttemplate.Template, name, tpl string) (*te
 
 func (t *textTemplates) addTemplateIn(tt *texttemplate.Template, name, tpl string) error {
 	name = strings.TrimPrefix(name, textTmplNamePrefix)
-	templ, err := t.parSeIn(tt, name, tpl)
+	templ, err := t.parseIn(tt, name, tpl)
 	if err != nil {
 		return err
 	}
 
-	if err := applyTemplateTransformersToTextTemplate(templ); err != nil {
+	isShort := isShortcode(name)
+
+	info, err := applyTemplateTransformersToTextTemplate(isShort, templ)
+	if err != nil {
 		return err
 	}
 
-	if strings.Contains(name, "shortcodes") {
-		// We need to keep track of one ot the output format's shortcode template
-		// without knowing the rendering context.
-		withoutExt := strings.TrimSuffix(name, path.Ext(name))
-		clone := texttemplate.Must(templ.Clone())
-		tt.AddParseTree(withoutExt, clone.Tree)
+	if isShort {
+		t.handler.addShortcodeVariant(name, info, templ)
 	}
 
 	return nil
@@ -547,6 +666,12 @@ func (t *templateHandler) initFuncs() {
 
 	}
 
+	for _, v := range t.shortcodes {
+		for _, variant := range v.variants {
+			t.setFuncMapInTemplate(variant.templ, funcMap)
+		}
+	}
+
 	for _, extText := range t.extTextTemplates {
 		extText.t.Funcs(funcMap)
 	}
@@ -612,7 +737,7 @@ func (t *htmlTemplates) handleMaster(name, overlayFilename, masterFilename strin
 	// * https://github.com/golang/go/issues/16101
 	// * https://github.com/gohugoio/hugo/issues/2549
 	overlayTpl = overlayTpl.Lookup(overlayTpl.Name())
-	if err := applyTemplateTransformersToHMLTTemplate(overlayTpl); err != nil {
+	if _, err := applyTemplateTransformersToHMLTTemplate(false, overlayTpl); err != nil {
 		return err
 	}
 
@@ -652,7 +777,7 @@ func (t *textTemplates) handleMaster(name, overlayFilename, masterFilename strin
 	}
 
 	overlayTpl = overlayTpl.Lookup(overlayTpl.Name())
-	if err := applyTemplateTransformersToTextTemplate(overlayTpl); err != nil {
+	if _, err := applyTemplateTransformersToTextTemplate(false, overlayTpl); err != nil {
 		return err
 	}
 	t.overlays[name] = overlayTpl
@@ -722,15 +847,15 @@ func (t *templateHandler) addTemplateFile(name, baseTemplatePath, path string) e
 			return err
 		}
 
-		if err := applyTemplateTransformersToHMLTTemplate(templ); err != nil {
+		isShort := isShortcode(name)
+
+		info, err := applyTemplateTransformersToHMLTTemplate(isShort, templ)
+		if err != nil {
 			return err
 		}
 
-		if strings.Contains(templateName, "shortcodes") {
-			// We need to keep track of one ot the output format's shortcode template
-			// without knowing the rendering context.
-			clone := template.Must(templ.Clone())
-			t.html.t.AddParseTree(withoutExt, clone.Tree)
+		if isShort {
+			t.addShortcodeVariant(templateName, info, templ)
 		}
 
 		return nil
diff --git a/tpl/tplimpl/templateFuncster.go b/tpl/tplimpl/templateFuncster.go
index 1fa6a283573..ad51fbad755 100644
--- a/tpl/tplimpl/templateFuncster.go
+++ b/tpl/tplimpl/templateFuncster.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,12 +14,8 @@
 package tplimpl
 
 import (
-	"fmt"
 	"html/template"
-	"strings"
-	texttemplate "text/template"
 
-	bp "github.com/gohugoio/hugo/bufferpool"
 	"github.com/gohugoio/hugo/deps"
 )
 
@@ -35,43 +31,3 @@ func newTemplateFuncster(deps *deps.Deps) *templateFuncster {
 		Deps: deps,
 	}
 }
-
-// Partial executes the named partial and returns either a string,
-// when called from text/template, for or a template.HTML.
-func (t *templateFuncster) partial(name string, contextList ...interface{}) (interface{}, error) {
-	if strings.HasPrefix(name, "partials/") {
-		name = name[8:]
-	}
-	var context interface{}
-
-	if len(contextList) == 0 {
-		context = nil
-	} else {
-		context = contextList[0]
-	}
-
-	for _, n := range []string{"partials/" + name, "theme/partials/" + name} {
-		templ, found := t.Tmpl.Lookup(n)
-		if !found {
-			// For legacy reasons.
-			templ, found = t.Tmpl.Lookup(n + ".html")
-		}
-		if found {
-			b := bp.GetBuffer()
-			defer bp.PutBuffer(b)
-
-			if err := templ.Execute(b, context); err != nil {
-				return "", err
-			}
-
-			if _, ok := templ.(*texttemplate.Template); ok {
-				return b.String(), nil
-			}
-
-			return template.HTML(b.String()), nil
-
-		}
-	}
-
-	return "", fmt.Errorf("Partial %q not found", name)
-}
diff --git a/tpl/tplimpl/template_ast_transformers.go b/tpl/tplimpl/template_ast_transformers.go
index e1cfb1aa427..28898c55baa 100644
--- a/tpl/tplimpl/template_ast_transformers.go
+++ b/tpl/tplimpl/template_ast_transformers.go
@@ -14,11 +14,16 @@
 package tplimpl
 
 import (
-	"errors"
 	"html/template"
 	"strings"
 	texttemplate "text/template"
 	"text/template/parse"
+
+	"github.com/pkg/errors"
+
+	"github.com/gohugoio/hugo/tpl"
+	"github.com/mitchellh/mapstructure"
+	"github.com/spf13/cast"
 )
 
 // decl keeps track of the variable mappings, i.e. $mysite => .Site etc.
@@ -38,6 +43,18 @@ type templateContext struct {
 	decl     decl
 	visited  map[string]bool
 	lookupFn func(name string) *parse.Tree
+
+	// The last error encountered.
+	err error
+
+	// Only needed for shortcodes
+	isShortcode bool
+
+	// Set when we're done checking for config header.
+	configChecked bool
+
+	// Contains some info about the template
+	tpl.Info
 }
 
 func (c templateContext) getIfNotVisited(name string) *parse.Tree {
@@ -49,7 +66,11 @@ func (c templateContext) getIfNotVisited(name string) *parse.Tree {
 }
 
 func newTemplateContext(lookupFn func(name string) *parse.Tree) *templateContext {
-	return &templateContext{lookupFn: lookupFn, decl: make(map[string]string), visited: make(map[string]bool)}
+	return &templateContext{
+		Info:     tpl.Info{Config: tpl.DefaultConfig},
+		lookupFn: lookupFn,
+		decl:     make(map[string]string),
+		visited:  make(map[string]bool)}
 
 }
 
@@ -63,12 +84,12 @@ func createParseTreeLookup(templ *template.Template) func(nn string) *parse.Tree
 	}
 }
 
-func applyTemplateTransformersToHMLTTemplate(templ *template.Template) error {
-	return applyTemplateTransformers(templ.Tree, createParseTreeLookup(templ))
+func applyTemplateTransformersToHMLTTemplate(isShortcode bool, templ *template.Template) (tpl.Info, error) {
+	return applyTemplateTransformers(isShortcode, templ.Tree, createParseTreeLookup(templ))
 }
 
-func applyTemplateTransformersToTextTemplate(templ *texttemplate.Template) error {
-	return applyTemplateTransformers(templ.Tree,
+func applyTemplateTransformersToTextTemplate(isShortcode bool, templ *texttemplate.Template) (tpl.Info, error) {
+	return applyTemplateTransformers(isShortcode, templ.Tree,
 		func(nn string) *parse.Tree {
 			tt := templ.Lookup(nn)
 			if tt != nil {
@@ -78,16 +99,17 @@ func applyTemplateTransformersToTextTemplate(templ *texttemplate.Template) error
 		})
 }
 
-func applyTemplateTransformers(templ *parse.Tree, lookupFn func(name string) *parse.Tree) error {
+func applyTemplateTransformers(isShortcode bool, templ *parse.Tree, lookupFn func(name string) *parse.Tree) (tpl.Info, error) {
 	if templ == nil {
-		return errors.New("expected template, but none provided")
+		return tpl.Info{}, errors.New("expected template, but none provided")
 	}
 
 	c := newTemplateContext(lookupFn)
+	c.isShortcode = isShortcode
 
-	c.applyTransformations(templ.Root)
+	err := c.applyTransformations(templ.Root)
 
-	return nil
+	return c.Info, err
 }
 
 // The truth logic in Go's template package is broken for certain values
@@ -115,10 +137,11 @@ func (c *templateContext) wrapWithGetIf(p *parse.PipeNode) {
 
 }
 
-// applyTransformations do two things:
+// applyTransformations does three things:
 // 1) Make all .Params.CamelCase and similar into lowercase.
 // 2) Wraps every with and if pipe in getif
-func (c *templateContext) applyTransformations(n parse.Node) {
+// 3) Collects some information about the template content.
+func (c *templateContext) applyTransformations(n parse.Node) error {
 	switch x := n.(type) {
 	case *parse.ListNode:
 		if x != nil {
@@ -140,6 +163,7 @@ func (c *templateContext) applyTransformations(n parse.Node) {
 			c.applyTransformationsToNodes(subTempl.Root)
 		}
 	case *parse.PipeNode:
+		c.collectConfig(x)
 		if len(x.Decl) == 1 && len(x.Cmds) == 1 {
 			// maps $site => .Site etc.
 			c.decl[x.Decl[0].Ident[0]] = x.Cmds[0].String()
@@ -150,6 +174,8 @@ func (c *templateContext) applyTransformations(n parse.Node) {
 		}
 
 	case *parse.CommandNode:
+		c.collectInner(x)
+
 		for _, elem := range x.Args {
 			switch an := elem.(type) {
 			case *parse.FieldNode:
@@ -166,6 +192,8 @@ func (c *templateContext) applyTransformations(n parse.Node) {
 			}
 		}
 	}
+
+	return c.err
 }
 
 func (c *templateContext) applyTransformationsToNodes(nodes ...parse.Node) {
@@ -187,6 +215,86 @@ func (c *templateContext) updateIdentsIfNeeded(idents []string) {
 
 }
 
+func (c *templateContext) hasIdent(idents []string, ident string) bool {
+	for _, id := range idents {
+		if id == ident {
+			return true
+		}
+	}
+	return false
+}
+
+// collectConfig collects and parses any leading template config variable declaration.
+// This will be the first PipeNode in the template, and will be a variable declaration
+// of the form:
+//    {{ $_hugo_config:= `{ "version": 1 }` }}
+func (c *templateContext) collectConfig(n *parse.PipeNode) {
+	if !c.isShortcode {
+		return
+	}
+	if c.configChecked {
+		return
+	}
+	c.configChecked = true
+
+	if len(n.Decl) != 1 || len(n.Cmds) != 1 {
+		// This cannot be a config declaration
+		return
+	}
+
+	v := n.Decl[0]
+
+	if len(v.Ident) == 0 || v.Ident[0] != "$_hugo_config" {
+		return
+	}
+
+	cmd := n.Cmds[0]
+
+	if len(cmd.Args) == 0 {
+		return
+	}
+
+	if s, ok := cmd.Args[0].(*parse.StringNode); ok {
+		errMsg := "failed to decode $_hugo_config in template"
+		m, err := cast.ToStringMapE(s.Text)
+		if err != nil {
+			c.err = errors.Wrap(err, errMsg)
+			return
+		}
+		if err := mapstructure.WeakDecode(m, &c.Info.Config); err != nil {
+			c.err = errors.Wrap(err, errMsg)
+		}
+	}
+
+}
+
+// collectInner determines if the given CommandNode represents a
+// shortcode call to its .Inner.
+func (c *templateContext) collectInner(n *parse.CommandNode) {
+	if !c.isShortcode {
+		return
+	}
+	if c.Info.IsInner || len(n.Args) == 0 {
+		return
+	}
+
+	for _, arg := range n.Args {
+		var idents []string
+		switch nt := arg.(type) {
+		case *parse.FieldNode:
+			idents = nt.Ident
+		case *parse.VariableNode:
+			idents = nt.Ident
+		}
+
+		if c.hasIdent(idents, "Inner") {
+			c.Info.IsInner = true
+			break
+		}
+	}
+
+}
+
 // indexOfReplacementStart will return the index of where to start doing replacement,
 // -1 if none needed.
 func (d decl) indexOfReplacementStart(idents []string) int {
diff --git a/tpl/tplimpl/template_ast_transformers_test.go b/tpl/tplimpl/template_ast_transformers_test.go
index 611f5d8caa2..8d8b4236833 100644
--- a/tpl/tplimpl/template_ast_transformers_test.go
+++ b/tpl/tplimpl/template_ast_transformers_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -21,14 +21,15 @@ import (
 
 	"github.com/gohugoio/hugo/tpl"
 
-	"github.com/gohugoio/hugo/deps"
-	"github.com/gohugoio/hugo/hugofs"
-
 	"github.com/spf13/cast"
 
 	"github.com/stretchr/testify/require"
 )
 
+type handler interface {
+	addTemplate(name, tpl string) error
+}
+
 var (
 	testFuncs = map[string]interface{}{
 		"getif":  func(v interface{}) interface{} { return v },
@@ -179,7 +180,8 @@ PARAMS SITE GLOBAL3: {{ $site.Params.LOWER }}
 func TestParamsKeysToLower(t *testing.T) {
 	t.Parallel()
 
-	require.Error(t, applyTemplateTransformers(nil, nil))
+	_, err := applyTemplateTransformers(false, nil, nil)
+	require.Error(t, err)
 
 	templ, err := template.New("foo").Funcs(testFuncs).Parse(paramsTempl)
 
@@ -429,17 +431,7 @@ func TestInsertIsZeroFunc(t *testing.T) {
 `
 	)
 
-	v := newTestConfig()
-	fs := hugofs.NewMem(v)
-
-	depsCfg := newDepsConfig(v)
-	depsCfg.Fs = fs
-	d, err := deps.New(depsCfg)
-	assert.NoError(err)
-
-	provider := DefaultTemplateProvider
-	provider.Update(d)
-
+	d := newD(assert)
 	h := d.Tmpl.(handler)
 
 	assert.NoError(h.addTemplate("mytemplate.html", templ))
@@ -458,3 +450,45 @@ func TestInsertIsZeroFunc(t *testing.T) {
 	assert.Contains(result, ".NonEmptyInterfaceTypedNil: FALSE")
 
 }
+
+func TestCollectInfo(t *testing.T) {
+
+	configStr := `{ "version": 42 }`
+
+	tests := []struct {
+		name      string
+		tplString string
+		expected  tpl.Info
+	}{
+		{"Basic Inner", `{{ .Inner }}`, tpl.Info{IsInner: true, Config: tpl.DefaultConfig}},
+		{"Basic config map", "{{ $_hugo_config := `" + configStr + "`  }}", tpl.Info{
+			Config: tpl.Config{
+				Version: 42,
+			},
+		}},
+	}
+
+	echo := func(in interface{}) interface{} {
+		return in
+	}
+
+	funcs := template.FuncMap{
+		"highlight": echo,
+	}
+
+	for _, test := range tests {
+		t.Run(test.name, func(t *testing.T) {
+			assert := require.New(t)
+
+			templ, err := template.New("foo").Funcs(funcs).Parse(test.tplString)
+			require.NoError(t, err)
+
+			c := newTemplateContext(createParseTreeLookup(templ))
+			c.isShortcode = true
+			c.applyTransformations(templ.Tree.Root)
+
+			assert.Equal(test.expected, c.Info)
+		})
+	}
+
+}
diff --git a/tpl/tplimpl/template_funcs_test.go b/tpl/tplimpl/template_funcs_test.go
index 22387dc016f..c21ef38a629 100644
--- a/tpl/tplimpl/template_funcs_test.go
+++ b/tpl/tplimpl/template_funcs_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -220,21 +220,3 @@ func doBenchmarkPartial(b *testing.B, f func(ns *partials.Namespace) error) {
 		}
 	})
 }
-
-func newTestFuncster() *templateFuncster {
-	return newTestFuncsterWithViper(viper.New())
-}
-
-func newTestFuncsterWithViper(v *viper.Viper) *templateFuncster {
-	config := newDepsConfig(v)
-	d, err := deps.New(config)
-	if err != nil {
-		panic(err)
-	}
-
-	if err := d.LoadResources(); err != nil {
-		panic(err)
-	}
-
-	return d.Tmpl.(*templateHandler).html.funcster
-}
diff --git a/tpl/tplimpl/template_test.go b/tpl/tplimpl/template_info_test.go
similarity index 55%
rename from tpl/tplimpl/template_test.go
rename to tpl/tplimpl/template_info_test.go
index 683850fa57c..0ebaa6da3c2 100644
--- a/tpl/tplimpl/template_test.go
+++ b/tpl/tplimpl/template_info_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -10,7 +10,6 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
-
 package tplimpl
 
 import (
@@ -22,45 +21,36 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-type handler interface {
-	addTemplate(name, tpl string) error
-}
-
-// #3876
-func TestHTMLEscape(t *testing.T) {
+func TestTemplateInfoShortcode(t *testing.T) {
 	assert := require.New(t)
+	d := newD(assert)
+	h := d.Tmpl.(handler)
+
+	assert.NoError(h.addTemplate("shortcodes/mytemplate.html", `
+{{ .Inner }}
+`))
+	tt, found, _ := d.Tmpl.LookupVariant("mytemplate", tpl.TemplateVariants{})
+
+	assert.True(found)
+	tti, ok := tt.(tpl.TemplateInfoProvider)
+	assert.True(ok)
+	assert.True(tti.TemplateInfo().IsInner)
+
+}
 
-	data := map[string]string{
-		"html":  "<h1>Hi!</h1>",
-		"other": "<h1>Hi!</h1>",
-	}
+// TODO(bep) move and use in other places
+func newD(assert *require.Assertions) *deps.Deps {
 	v := newTestConfig()
 	fs := hugofs.NewMem(v)
 
-	//afero.WriteFile(fs.Source, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755)
-
 	depsCfg := newDepsConfig(v)
 	depsCfg.Fs = fs
 	d, err := deps.New(depsCfg)
 	assert.NoError(err)
 
-	templ := `{{ "<h1>Hi!</h1>" | safeHTML }}`
-
 	provider := DefaultTemplateProvider
 	provider.Update(d)
 
-	h := d.Tmpl.(handler)
-
-	assert.NoError(h.addTemplate("shortcodes/myShort.html", templ))
-
-	tt, _ := d.Tmpl.Lookup("shortcodes/myShort.html")
-	s, err := tt.(tpl.TemplateExecutor).ExecuteToString(data)
-	assert.NoError(err)
-	assert.Contains(s, "<h1>Hi!</h1>")
-
-	tt, _ = d.Tmpl.Lookup("shortcodes/myShort")
-	s, err = tt.(tpl.TemplateExecutor).ExecuteToString(data)
-	assert.NoError(err)
-	assert.Contains(s, "<h1>Hi!</h1>")
+	return d
 
 }