From b0d9569bd937e927c12e020a51c026b6d81b4d35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?= Date: Wed, 2 Jan 2019 12:33:26 +0100 Subject: [PATCH] Add a Page interface The main motivation of this commit is to add a `page.Page` interface to replace the old very file-oriented `hugolib.Page` struct to prepare for #5074, "pages from other data sources". But this also fixes a set of annoying limiations, especially related to custom output formats, and shortcodes. And it's faster. See #5074 Fixes #5090 Fixes #5204 Fixes #4695 Fixes #5607 Fixes #5704 Fixes #5707 Fixes #5719 --- benchbep.sh | 2 + commands/commands_test.go | 2 +- commands/convert.go | 20 +- commands/hugo.go | 10 +- commands/list.go | 11 +- commands/server.go | 2 +- common/hugio/readers.go | 1 + common/maps/scratch.go | 18 + config/configProvider.go | 12 + config/services/servicesConfig.go | 12 + {hugolib => config}/sitemap.go | 15 +- create/content.go | 6 +- deps/deps.go | 7 +- docs/content/en/variables/page.md | 15 +- go.sum | 1 + helpers/content.go | 6 +- helpers/content_renderer_test.go | 4 +- helpers/content_test.go | 22 +- helpers/general.go | 2 +- helpers/general_test.go | 23 +- helpers/path.go | 7 + helpers/pygments.go | 2 +- htesting/test_structs.go | 47 +- hugolib/alias.go | 28 +- hugolib/alias_test.go | 2 +- hugolib/collections.go | 73 +- hugolib/collections_test.go | 16 +- hugolib/config.go | 12 +- hugolib/datafiles_test.go | 4 +- hugolib/disableKinds_test.go | 38 +- hugolib/embedded_shortcodes_test.go | 8 +- hugolib/gitinfo.go | 12 +- hugolib/hugo_sites.go | 489 ++-- hugolib/hugo_sites_build.go | 122 +- hugolib/hugo_sites_build_errors_test.go | 118 +- hugolib/hugo_sites_build_test.go | 358 ++- hugolib/hugo_sites_multihost_test.go | 26 +- hugolib/hugo_smoke_test.go | 297 ++ hugolib/language_content_dir_test.go | 42 +- hugolib/media.go | 60 - hugolib/menu_test.go | 6 +- hugolib/minify_publisher_test.go | 18 +- hugolib/multilingual.go | 14 +- hugolib/orderedMap.go | 8 - hugolib/page.go | 2543 ++++------------- hugolib/page__common.go | 106 + hugolib/page__data.go | 70 + hugolib/page__menus.go | 71 + hugolib/page__meta.go | 639 +++++ hugolib/page__new.go | 331 +++ hugolib/page__output.go | 47 + hugolib/page__paths.go | 148 + hugolib/page__per_output.go | 426 +++ hugolib/{page_ref.go => page__ref.go} | 61 +- hugolib/page__tree.go | 113 + hugolib/page_content.go | 247 +- hugolib/page_errors.go | 47 - hugolib/page_kinds.go | 40 + hugolib/page_output.go | 320 --- hugolib/page_pagination.go | 83 + hugolib/page_paths.go | 312 -- hugolib/page_paths_test.go | 194 -- hugolib/page_permalink_test.go | 42 +- hugolib/page_position.go | 76 + hugolib/page_taxonomy_test.go | 96 - hugolib/page_test.go | 808 +----- hugolib/page_time_integration_test.go | 183 -- hugolib/page_unwrap.go | 50 + ...separators_test.go => page_unwrap_test.go} | 31 +- hugolib/page_without_content.go | 67 - hugolib/pagebundler.go | 10 +- hugolib/pagebundler_handlers.go | 123 +- hugolib/pagebundler_test.go | 164 +- hugolib/pagecollections.go | 280 +- hugolib/pagecollections_test.go | 84 +- hugolib/pages_language_merge_test.go | 40 +- hugolib/pages_related_test.go | 75 - hugolib/paths/paths.go | 1 + hugolib/paths/themes.go | 2 +- hugolib/permalinker.go | 3 +- hugolib/permalinks.go | 213 -- hugolib/permalinks_test.go | 85 - hugolib/resource_chain_test.go | 6 +- hugolib/rss_test.go | 2 +- hugolib/shortcode.go | 292 +- hugolib/shortcode_page.go | 56 + hugolib/shortcode_test.go | 358 +-- hugolib/site.go | 1004 +++---- hugolib/siteJSONEncode_test.go 
| 4 +- hugolib/site_output.go | 13 +- hugolib/site_output_test.go | 40 +- hugolib/site_render.go | 367 +-- hugolib/site_sections.go | 269 +- hugolib/site_sections_test.go | 149 +- hugolib/site_test.go | 119 +- hugolib/site_url_test.go | 12 +- hugolib/sitemap_test.go | 8 +- hugolib/taxonomy.go | 149 +- hugolib/taxonomy_test.go | 115 +- hugolib/testhelpers_test.go | 103 +- hugolib/translations.go | 54 +- lazy/init.go | 193 ++ lazy/init_test.go | 150 + lazy/twice.go | 64 + media/mediaType.go | 1 + {hugolib => navigation}/menu.go | 41 +- navigation/pagemenus.go | 240 ++ parser/pageparser/itemtype_string.go | 16 + parser/pageparser/pageparser.go | 25 +- parser/pageparser/pageparser_test.go | 4 +- publisher/publisher.go | 2 +- related/inverted_index.go | 8 +- related/inverted_index_test.go | 2 +- resources/image.go | 3 +- resources/image_cache.go | 18 - resources/page/page.go | 304 ++ .../page/page_author.go | 4 +- resources/page/page_data.go | 42 + resources/page/page_data_test.go | 57 + .../page/page_kinds.go | 18 +- .../page/page_kinds_test.go | 25 +- resources/page/page_nop.go | 467 +++ resources/page/page_outputformat.go | 85 + resources/page/page_paths.go | 333 +++ resources/page/page_paths_test.go | 252 ++ .../page/pagegroup.go | 137 +- .../page/pagegroup_test.go | 97 +- .../page}/pagemeta/page_frontmatter.go | 13 +- .../page}/pagemeta/page_frontmatter_test.go | 39 +- .../page}/pagemeta/pagemeta.go | 11 - resources/page/pages.go | 116 + .../page/pages_cache.go | 2 +- .../page/pages_cache_test.go | 12 +- .../page}/pages_language_merge.go | 6 +- .../page/pages_prev_next.go | 12 +- .../page/pages_prev_next_test.go | 19 +- {hugolib => resources/page}/pages_related.go | 52 +- resources/page/pages_related_test.go | 86 + .../page/pages_sort.go | 104 +- .../page/pages_sort_test.go | 116 +- {hugolib => resources/page}/pagination.go | 267 +- .../page}/pagination_test.go | 237 +- resources/page/permalinks.go | 259 ++ resources/page/permalinks_test.go | 180 ++ resources/page/site.go | 52 + resources/page/testhelpers_test.go | 548 ++++ resources/page/weighted.go | 140 + resources/resource.go | 56 +- resources/resource/dates.go | 81 + resources/resource/params.go | 89 + resources/resource/resource_helpers.go | 70 + resources/resource/resourcetypes.go | 98 +- resources/resource_metadata.go | 1 - resources/resource_metadata_test.go | 4 +- resources/resource_test.go | 6 +- resources/testhelpers_test.go | 19 +- resources/transform.go | 4 +- source/fileInfo.go | 31 +- tpl/collections/collections.go | 7 +- tpl/collections/collections_test.go | 36 +- tpl/collections/where.go | 2 +- tpl/template.go | 7 + tpl/tplimpl/embedded/templates.autogen.go | 16 +- .../embedded/templates/_default/rss.xml | 8 +- .../embedded/templates/_default/sitemap.xml | 4 +- tpl/tplimpl/embedded/templates/disqus.html | 2 +- tpl/tplimpl/template.go | 14 +- 167 files changed, 10755 insertions(+), 8008 deletions(-) create mode 100755 benchbep.sh rename {hugolib => config}/sitemap.go (73%) create mode 100644 hugolib/hugo_smoke_test.go delete mode 100644 hugolib/media.go create mode 100644 hugolib/page__common.go create mode 100644 hugolib/page__data.go create mode 100644 hugolib/page__menus.go create mode 100644 hugolib/page__meta.go create mode 100644 hugolib/page__new.go create mode 100644 hugolib/page__output.go create mode 100644 hugolib/page__paths.go create mode 100644 hugolib/page__per_output.go rename hugolib/{page_ref.go => page__ref.go} (56%) create mode 100644 hugolib/page__tree.go delete mode 100644 hugolib/page_errors.go 
create mode 100644 hugolib/page_kinds.go delete mode 100644 hugolib/page_output.go create mode 100644 hugolib/page_pagination.go delete mode 100644 hugolib/page_paths.go delete mode 100644 hugolib/page_paths_test.go create mode 100644 hugolib/page_position.go delete mode 100644 hugolib/page_taxonomy_test.go delete mode 100644 hugolib/page_time_integration_test.go create mode 100644 hugolib/page_unwrap.go rename hugolib/{path_separators_test.go => page_unwrap_test.go} (57%) delete mode 100644 hugolib/page_without_content.go delete mode 100644 hugolib/pages_related_test.go delete mode 100644 hugolib/permalinks.go delete mode 100644 hugolib/permalinks_test.go create mode 100644 hugolib/shortcode_page.go create mode 100644 lazy/init.go create mode 100644 lazy/init_test.go create mode 100644 lazy/twice.go rename {hugolib => navigation}/menu.go (89%) create mode 100644 navigation/pagemenus.go create mode 100644 parser/pageparser/itemtype_string.go create mode 100644 resources/page/page.go rename hugolib/author.go => resources/page/page_author.go (94%) create mode 100644 resources/page/page_data.go create mode 100644 resources/page/page_data_test.go rename hugolib/page_resource.go => resources/page/page_kinds.go (67%) rename common/hugo/site.go => resources/page/page_kinds_test.go (55%) create mode 100644 resources/page/page_nop.go create mode 100644 resources/page/page_outputformat.go create mode 100644 resources/page/page_paths.go create mode 100644 resources/page/page_paths_test.go rename hugolib/pageGroup.go => resources/page/pagegroup.go (71%) rename hugolib/pageGroup_test.go => resources/page/pagegroup_test.go (83%) rename {hugolib => resources/page}/pagemeta/page_frontmatter.go (98%) rename {hugolib => resources/page}/pagemeta/page_frontmatter_test.go (88%) rename {hugolib => resources/page}/pagemeta/pagemeta.go (83%) create mode 100644 resources/page/pages.go rename hugolib/pageCache.go => resources/page/pages_cache.go (99%) rename hugolib/pageCache_test.go => resources/page/pages_cache_test.go (88%) rename {hugolib => resources/page}/pages_language_merge.go (94%) rename hugolib/pagesPrevNext.go => resources/page/pages_prev_next.go (77%) rename hugolib/pagesPrevNext_test.go => resources/page/pages_prev_next_test.go (85%) rename {hugolib => resources/page}/pages_related.go (81%) create mode 100644 resources/page/pages_related_test.go rename hugolib/pageSort.go => resources/page/pages_sort.go (78%) rename hugolib/pageSort_test.go => resources/page/pages_sort_test.go (72%) rename {hugolib => resources/page}/pagination.go (56%) rename {hugolib => resources/page}/pagination_test.go (72%) create mode 100644 resources/page/permalinks.go create mode 100644 resources/page/permalinks_test.go create mode 100644 resources/page/site.go create mode 100644 resources/page/testhelpers_test.go create mode 100644 resources/page/weighted.go create mode 100644 resources/resource/dates.go create mode 100644 resources/resource/params.go create mode 100644 resources/resource/resource_helpers.go diff --git a/benchbep.sh b/benchbep.sh new file mode 100755 index 00000000000..e94cc4e6308 --- /dev/null +++ b/benchbep.sh @@ -0,0 +1,2 @@ +gobench -package=./hugolib -bench="BenchmarkSiteBuilding/TOML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench +benchcmp -best 0.bench 1.bench \ No newline at end of file diff --git a/commands/commands_test.go b/commands/commands_test.go index 2e8b99dc413..00dc5c39a23 100644 --- a/commands/commands_test.go +++ b/commands/commands_test.go @@ -41,7 +41,7 
@@ func TestExecute(t *testing.T) { assert.NoError(resp.Err) result := resp.Result assert.True(len(result.Sites) == 1) - assert.True(len(result.Sites[0].RegularPages) == 1) + assert.True(len(result.Sites[0].RegularPages()) == 1) } func TestCommandsPersistentFlags(t *testing.T) { diff --git a/commands/convert.go b/commands/convert.go index c4f88a24537..7c7417e6fad 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -20,6 +20,8 @@ import ( "strings" "time" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/helpers" @@ -124,8 +126,8 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { site := h.Sites[0] - site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files") - for _, p := range site.AllPages { + site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files") + for _, p := range site.AllPages() { if err := cc.convertAndSavePage(p, site, format); err != nil { return err } @@ -133,24 +135,24 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error { return nil } -func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { +func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error { // The resources are not in .Site.AllPages. - for _, r := range p.Resources.ByType("page") { - if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil { + for _, r := range p.Resources().ByType("page") { + if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil { return err } } - if p.Filename() == "" { + if p.File().Filename() == "" { // No content file. return nil } errMsg := fmt.Errorf("Error processing file %q", p.Path()) - site.Log.INFO.Println("Attempting to convert", p.LogicalName()) + site.Log.INFO.Println("Attempting to convert", p.File().Filename()) - f, _ := p.File.(src.ReadableFile) + f, _ := p.File().(src.ReadableFile) file, err := f.Open() if err != nil { site.Log.ERROR.Println(errMsg) @@ -186,7 +188,7 @@ func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, ta newContent.Write(pf.content) - newFilename := p.Filename() + newFilename := p.File().Filename() if cc.outputDir != "" { contentDir := strings.TrimSuffix(newFilename, p.Path()) diff --git a/commands/hugo.go b/commands/hugo.go index 3690c0ad519..30728770606 100644 --- a/commands/hugo.go +++ b/commands/hugo.go @@ -18,11 +18,12 @@ package commands import ( "fmt" "io/ioutil" - "os/signal" "sort" "sync/atomic" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/common/hugo" "github.com/pkg/errors" @@ -326,7 +327,7 @@ func (c *commandeer) fullBuild() error { } for _, s := range c.hugo.Sites { - s.ProcessingStats.Static = langCount[s.Language.Lang] + s.ProcessingStats.Static = langCount[s.Language().Lang] } if c.h.gc { @@ -973,7 +974,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher, navigate := c.Cfg.GetBool("navigateToChanged") // We have fetched the same page above, but it may have // changed. 
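The convert and server commands above illustrate the shift this commit makes at call sites: from field access on the old `*hugolib.Page` struct to accessor methods on the new `page.Page` interface (`p.File().Filename()`, `p.Resources()`, `p.Path()`, `s.Language()`). Below is a minimal sketch of caller code written against that interface, limited to accessors that appear in this diff; `collectPageFiles` is a hypothetical helper, not part of the commit.

```go
// Illustrative sketch only: caller code written against the new page.Page
// interface, using only accessors visible in this diff. collectPageFiles
// is a hypothetical helper, not part of this commit.
package example

import "github.com/gohugoio/hugo/resources/page"

// collectPageFiles gathers the content filename of a page and of any
// page-typed resources bundled with it (cf. convertAndSavePage above).
func collectPageFiles(p page.Page) []string {
	var files []string
	if p.File().Filename() != "" {
		files = append(files, p.File().Filename())
	}
	for _, r := range p.Resources().ByType("page") {
		if rp, ok := r.(page.Page); ok {
			files = append(files, rp.File().Filename())
		}
	}
	return files
}
```

The same accessor style recurs throughout the commands in this patch (`p.File().Dir()`, `p.File().LogicalName()`, `s.Language().Lang`).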
- var p *hugolib.Page + var p page.Page if navigate { if onePageName != "" { @@ -982,7 +983,8 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher, } if p != nil { - livereload.NavigateToPathForPort(p.RelPermalink(), p.Site.ServerPort()) + // TODO(bep) page + livereload.NavigateToPathForPort(p.RelPermalink(), 1313) // p.Site.ServerPort()) } else { livereload.ForceRefresh() } diff --git a/commands/list.go b/commands/list.go index 9922e957df8..5f3366fd025 100644 --- a/commands/list.go +++ b/commands/list.go @@ -17,6 +17,7 @@ import ( "path/filepath" "github.com/gohugoio/hugo/hugolib" + "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/cobra" jww "github.com/spf13/jwalterweatherman" ) @@ -67,7 +68,7 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.", for _, p := range sites.Pages() { if p.IsDraft() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } @@ -102,8 +103,8 @@ posted in the future.`, } for _, p := range sites.Pages() { - if p.IsFuture() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsFuture(p) { + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } @@ -138,8 +139,8 @@ expired.`, } for _, p := range sites.Pages() { - if p.IsExpired() { - jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName())) + if resource.IsExpired(p) { + jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName())) } } diff --git a/commands/server.go b/commands/server.go index c2bd76dae54..2afb7e920e3 100644 --- a/commands/server.go +++ b/commands/server.go @@ -403,7 +403,7 @@ func (c *commandeer) serve(s *serverCmd) error { if isMultiHost { for _, s := range c.hugo.Sites { baseURLs = append(baseURLs, s.BaseURL.String()) - roots = append(roots, s.Language.Lang) + roots = append(roots, s.Language().Lang) } } else { s := c.hugo.Sites[0] diff --git a/common/hugio/readers.go b/common/hugio/readers.go index ba55e2d08da..92c5ba8151c 100644 --- a/common/hugio/readers.go +++ b/common/hugio/readers.go @@ -32,6 +32,7 @@ type ReadSeekCloser interface { } // ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close. +// TODO(bep) rename this and simila to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense. type ReadSeekerNoOpCloser struct { ReadSeeker } diff --git a/common/maps/scratch.go b/common/maps/scratch.go index 2972e202200..6862a8f8455 100644 --- a/common/maps/scratch.go +++ b/common/maps/scratch.go @@ -28,6 +28,24 @@ type Scratch struct { mu sync.RWMutex } +// Scratcher provides a scratching service. +type Scratcher interface { + Scratch() *Scratch +} + +type scratcher struct { + s *Scratch +} + +func (s scratcher) Scratch() *Scratch { + return s.s +} + +// NewScratcher creates a new Scratcher. +func NewScratcher() Scratcher { + return scratcher{s: NewScratch()} +} + // Add will, for single values, add (using the + operator) the addend to the existing addend (if found). // Supports numeric values and strings. // diff --git a/config/configProvider.go b/config/configProvider.go index bc0dd950d7a..89cfe4359e1 100644 --- a/config/configProvider.go +++ b/config/configProvider.go @@ -40,3 +40,15 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string { } return cast.ToStringSlice(sd) } + +// SetBaseTestDefaults provides some common config defaults used in tests. 
+func SetBaseTestDefaults(cfg Provider) { + cfg.Set("resourceDir", "resources") + cfg.Set("contentDir", "content") + cfg.Set("dataDir", "data") + cfg.Set("i18nDir", "i18n") + cfg.Set("layoutDir", "layouts") + cfg.Set("assetDir", "assets") + cfg.Set("archetypeDir", "archetypes") + cfg.Set("publishDir", "public") +} diff --git a/config/services/servicesConfig.go b/config/services/servicesConfig.go index 7306f527483..871ffcac9d6 100644 --- a/config/services/servicesConfig.go +++ b/config/services/servicesConfig.go @@ -23,6 +23,7 @@ const ( disqusShortnameKey = "disqusshortname" googleAnalyticsKey = "googleanalytics" + rssLimitKey = "rssLimit" ) // Config is a privacy configuration for all the relevant services in Hugo. @@ -31,6 +32,7 @@ type Config struct { GoogleAnalytics GoogleAnalytics Instagram Instagram Twitter Twitter + RSS RSS } // Disqus holds the functional configuration settings related to the Disqus template. @@ -61,6 +63,12 @@ type Twitter struct { DisableInlineCSS bool } +// RSS holds the functional configuration settings related to the RSS feeds. +type RSS struct { + // Limit the number of pages. + Limit int +} + // DecodeConfig creates a services Config from a given Hugo configuration. func DecodeConfig(cfg config.Provider) (c Config, err error) { m := cfg.GetStringMap(servicesConfigKey) @@ -76,5 +84,9 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) { c.Disqus.Shortname = cfg.GetString(disqusShortnameKey) } + if c.RSS.Limit == 0 { + c.RSS.Limit = cfg.GetInt(rssLimitKey) + } + return } diff --git a/hugolib/sitemap.go b/config/sitemap.go similarity index 73% rename from hugolib/sitemap.go rename to config/sitemap.go index 64d6f5b7a75..4031b7ec115 100644 --- a/hugolib/sitemap.go +++ b/config/sitemap.go @@ -1,4 +1,4 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
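The new `SetBaseTestDefaults` helper centralizes the directory settings that test setups previously repeated by hand. A minimal usage sketch follows, assuming a viper-backed `config.Provider` as Hugo's own tests use; `newTestCfg` is an illustrative name, not part of this commit.

```go
// Illustrative usage of the new config.SetBaseTestDefaults helper,
// assuming *viper.Viper as the config.Provider implementation.
package example

import (
	"github.com/gohugoio/hugo/config"
	"github.com/spf13/viper"
)

func newTestCfg() config.Provider {
	v := viper.New()
	// Fill in the common directory defaults (contentDir, dataDir, layoutDir, ...).
	config.SetBaseTestDefaults(v)
	v.Set("baseURL", "https://example.org/")
	return v
}
```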
-package hugolib +package config import ( "github.com/spf13/cast" @@ -25,21 +25,20 @@ type Sitemap struct { Filename string } -func parseSitemap(input map[string]interface{}) Sitemap { - sitemap := Sitemap{Priority: -1, Filename: "sitemap.xml"} +func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap { for key, value := range input { switch key { case "changefreq": - sitemap.ChangeFreq = cast.ToString(value) + prototype.ChangeFreq = cast.ToString(value) case "priority": - sitemap.Priority = cast.ToFloat64(value) + prototype.Priority = cast.ToFloat64(value) case "filename": - sitemap.Filename = cast.ToString(value) + prototype.Filename = cast.ToString(value) default: jww.WARN.Printf("Unknown Sitemap field: %s\n", key) } } - return sitemap + return prototype } diff --git a/create/content.go b/create/content.go index 31b7b2e4d70..8ac075ac3ee 100644 --- a/create/content.go +++ b/create/content.go @@ -50,7 +50,7 @@ func NewContent( if isDir { - langFs := hugofs.NewLanguageFs(s.Language.Lang, sites.LanguageSet(), archetypeFs) + langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs) cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename) if err != nil { @@ -113,7 +113,7 @@ func NewContent( func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site { for _, s := range sites.Sites { - if fi.Lang() == s.Language.Lang { + if fi.Lang() == s.Language().Lang { return s } } @@ -245,7 +245,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string // Try the filename: my-post.en.md for _, ss := range sites.Sites { - if strings.Contains(targetPath, "."+ss.Language.Lang+".") { + if strings.Contains(targetPath, "."+ss.Language().Lang+".") { s = ss break } diff --git a/deps/deps.go b/deps/deps.go index 628019961bc..47159d017c2 100644 --- a/deps/deps.go +++ b/deps/deps.go @@ -7,13 +7,14 @@ import ( "github.com/pkg/errors" "github.com/gohugoio/hugo/cache/filecache" - "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/metrics" "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/resources" @@ -67,7 +68,7 @@ type Deps struct { Language *langs.Language // The site building. - Site hugo.Site + Site page.Site // All the output formats available for the current site. OutputFormatsConfig output.Formats @@ -325,7 +326,7 @@ type DepsCfg struct { Language *langs.Language // The Site in use - Site hugo.Site + Site page.Site // The configuration to use. Cfg config.Provider diff --git a/docs/content/en/variables/page.md b/docs/content/en/variables/page.md index 9dcbdcc435e..c4ddc820040 100644 --- a/docs/content/en/variables/page.md +++ b/docs/content/en/variables/page.md @@ -79,8 +79,7 @@ See [`.Scratch`](/functions/scratch/) for page-scoped, writable variables. : the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections. .Language -: a language object that points to the language's definition in the site -`config`. 
+: a language object that points to the language's definition in the site `config`. `.Language.Lang` gives you the language code. .Lastmod : the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter. @@ -93,10 +92,7 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. .LinkTitle : access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`. -.Next (deprecated) -: In older Hugo versions this pointer went the wrong direction. Please use `.PrevPage` instead. - -.NextPage +.Next : Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`. .NextInSection @@ -119,9 +115,6 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. : the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice. .Prev (deprecated) -: In older Hugo versions this pointer went the wrong direction. Please use `.NextPage` instead. - -.PrevPage : Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`. .PrevInSection @@ -130,8 +123,8 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo]. .PublishDate : the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`. -.RSSLink -: link to the taxonomies' RSS link. +.RSSLink (deprecated) +: link to the page's RSS feed. This is deprecated. You should instead do something like this: `{{ with .OutputFormats.Get "RSS" }}{{ . RelPermalink }}{{ end }}`. .RawContent : raw markdown content without the front matter. 
Useful with [remarkjs.com]( diff --git a/go.sum b/go.sum index e2cf53c7553..8578104a6a3 100644 --- a/go.sum +++ b/go.sum @@ -75,6 +75,7 @@ github.com/magefile/mage v1.4.0 h1:RI7B1CgnPAuu2O9lWszwya61RLmfL0KCdo+QyyI/Bhk= github.com/magefile/mage v1.4.0/go.mod h1:IUDi13rsHje59lecXokTfGX0QIzO45uVPlXnJYsXepA= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/markbates/inflect v0.0.0-20171215194931-a12c3aec81a6 h1:LZhVjIISSbj8qLf2qDPP0D8z0uvOWAW5C85ly5mJW6c= github.com/markbates/inflect v0.0.0-20171215194931-a12c3aec81a6/go.mod h1:oTeZL2KHA7CUX6X+fovmK9OvIOFuqu0TwdQrZjLTh88= github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= diff --git a/helpers/content.go b/helpers/content.go index f8479cd1b9a..f73ee7fa3ea 100644 --- a/helpers/content.go +++ b/helpers/content.go @@ -57,7 +57,7 @@ type ContentSpec struct { Highlight func(code, lang, optsStr string) (string, error) defatultPygmentsOpts map[string]string - cfg config.Provider + Cfg config.Provider } // NewContentSpec returns a ContentSpec initialized @@ -73,7 +73,7 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) { BuildExpired: cfg.GetBool("buildExpired"), BuildDrafts: cfg.GetBool("buildDrafts"), - cfg: cfg, + Cfg: cfg, } // Highlighting setup @@ -376,7 +376,7 @@ func (c *ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingConte return &HugoMmarkHTMLRenderer{ cs: c, Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters), - Cfg: c.cfg, + Cfg: c.Cfg, } } diff --git a/helpers/content_renderer_test.go b/helpers/content_renderer_test.go index a01014b4eb3..db61cbaeffa 100644 --- a/helpers/content_renderer_test.go +++ b/helpers/content_renderer_test.go @@ -24,7 +24,7 @@ import ( // Renders a codeblock using Blackfriday func (c ContentSpec) render(input string) string { - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} render := c.getHTMLRenderer(0, ctx) buf := &bytes.Buffer{} @@ -34,7 +34,7 @@ func (c ContentSpec) render(input string) string { // Renders a codeblock using Mmark func (c ContentSpec) renderWithMmark(input string) string { - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} render := c.getMmarkHTMLRenderer(0, ctx) buf := &bytes.Buffer{} diff --git a/helpers/content_test.go b/helpers/content_test.go index 5297df2de2a..6971a8fc8b0 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -181,7 +181,7 @@ func TestTruncateWordsByRune(t *testing.T) { func TestGetHTMLRendererFlags(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx) flags := renderer.GetFlags() if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML { @@ -210,7 +210,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) { {blackfriday.HTML_SMARTYPANTS_LATEX_DASHES}, } defaultFlags := blackfriday.HTML_USE_XHTML - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.AngledQuotes = true ctx.Config.Fractions = true 
ctx.Config.HrefTargetBlank = true @@ -235,7 +235,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) { func TestGetHTMLRendererAnchors(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.DocumentID = "testid" ctx.Config.PlainIDAnchors = false @@ -259,7 +259,7 @@ func TestGetHTMLRendererAnchors(t *testing.T) { func TestGetMmarkHTMLRenderer(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.DocumentID = "testid" ctx.Config.PlainIDAnchors = false actualRenderer := c.getMmarkHTMLRenderer(0, ctx) @@ -283,7 +283,7 @@ func TestGetMmarkHTMLRenderer(t *testing.T) { func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"headerId"} ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"} @@ -298,7 +298,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) { testFlag int } c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{""} ctx.Config.ExtensionsMask = []string{""} allExtensions := []data{ @@ -330,7 +330,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) { func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"definitionLists"} ctx.Config.ExtensionsMask = []string{""} @@ -342,7 +342,7 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) { func TestGetMarkdownRenderer(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.markdownRender(ctx) expectedRenderedMarkdown := []byte("

<p>testContent</p>
\n") @@ -353,7 +353,7 @@ func TestGetMarkdownRenderer(t *testing.T) { func TestGetMarkdownRendererWithTOC(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{RenderTOC: true, Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.markdownRender(ctx) expectedRenderedMarkdown := []byte("\n\n

testContent

\n") @@ -368,7 +368,7 @@ func TestGetMmarkExtensions(t *testing.T) { testFlag int } c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Config.Extensions = []string{"tables"} ctx.Config.ExtensionsMask = []string{""} allExtensions := []data{ @@ -397,7 +397,7 @@ func TestGetMmarkExtensions(t *testing.T) { func TestMmarkRender(t *testing.T) { c := newTestContentSpec() - ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday} + ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday} ctx.Content = []byte("testContent") actualRenderedMarkdown := c.mmarkRender(ctx) expectedRenderedMarkdown := []byte("

<p>testContent</p>
\n") diff --git a/helpers/general.go b/helpers/general.go index 00caf1ecc91..0ff911c31e7 100644 --- a/helpers/general.go +++ b/helpers/general.go @@ -92,7 +92,7 @@ func GuessType(in string) string { return "org" } - return "unknown" + return "" } // FirstUpper returns a string with the first character as upper case. diff --git a/helpers/general_test.go b/helpers/general_test.go index 1279df43948..b6c7a14d968 100644 --- a/helpers/general_test.go +++ b/helpers/general_test.go @@ -42,7 +42,7 @@ func TestGuessType(t *testing.T) { {"html", "html"}, {"htm", "html"}, {"org", "org"}, - {"excel", "unknown"}, + {"excel", ""}, } { result := GuessType(this.in) if result != this.expect { @@ -166,6 +166,27 @@ var containsAdditionalTestData = []struct { {"", []byte(""), false}, } +func TestSliceToLower(t *testing.T) { + t.Parallel() + tests := []struct { + value []string + expected []string + }{ + {[]string{"a", "b", "c"}, []string{"a", "b", "c"}}, + {[]string{"a", "B", "c"}, []string{"a", "b", "c"}}, + {[]string{"A", "B", "C"}, []string{"a", "b", "c"}}, + } + + for _, test := range tests { + res := SliceToLower(test.value) + for i, val := range res { + if val != test.expected[i] { + t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i]) + } + } + } +} + func TestReaderContains(t *testing.T) { for i, this := range append(containsBenchTestData, containsAdditionalTestData...) { result := ReaderContains(strings.NewReader(this.v1), this.v2) diff --git a/helpers/path.go b/helpers/path.go index bf7e3bf9903..f3b04ce66a8 100644 --- a/helpers/path.go +++ b/helpers/path.go @@ -86,6 +86,13 @@ func (p *PathSpec) MakePath(s string) string { return p.UnicodeSanitize(s) } +// MakePathsSanitized applies MakePathSanitized on every item in the slice +func (p *PathSpec) MakePathsSanitized(paths []string) { + for i, path := range paths { + paths[i] = p.MakePathSanitized(path) + } +} + // MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced func (p *PathSpec) MakePathSanitized(s string) string { if p.DisablePathToLower { diff --git a/helpers/pygments.go b/helpers/pygments.go index 4a90e353ded..abbbdce4cac 100644 --- a/helpers/pygments.go +++ b/helpers/pygments.go @@ -56,7 +56,7 @@ type highlighters struct { } func newHiglighters(cs *ContentSpec) highlighters { - return highlighters{cs: cs, ignoreCache: cs.cfg.GetBool("ignoreCache"), cacheDir: cs.cfg.GetString("cacheDir")} + return highlighters{cs: cs, ignoreCache: cs.Cfg.GetBool("ignoreCache"), cacheDir: cs.Cfg.GetString("cacheDir")} } func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) { diff --git a/htesting/test_structs.go b/htesting/test_structs.go index f5aa6ff2513..8eda0ad38a5 100644 --- a/htesting/test_structs.go +++ b/htesting/test_structs.go @@ -14,8 +14,13 @@ package htesting import ( + "html/template" + "time" + "github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/langs" + "github.com/gohugoio/hugo/navigation" + "github.com/gohugoio/hugo/resources/page" "github.com/spf13/viper" ) @@ -28,6 +33,18 @@ func (t testSite) Hugo() hugo.Info { return t.h } +func (testSite) LastChange() (t time.Time) { + return +} + +func (t testSite) Title() string { + return "foo" +} + +func (t testSite) Sites() page.Sites { + return nil +} + func (t testSite) IsServer() bool { return false } @@ -36,8 +53,36 @@ func (t testSite) Language() *langs.Language { return t.l } +func (t testSite) Pages() page.Pages { + return nil +} + +func (t testSite) RegularPages() page.Pages { + return nil +} + +func (t 
testSite) Menus() navigation.Menus { + return nil +} + +func (t testSite) Taxonomies() interface{} { + return nil +} + +func (t testSite) BaseURL() template.URL { + return "" +} + +func (t testSite) Params() map[string]interface{} { + return nil +} + +func (t testSite) Data() map[string]interface{} { + return nil +} + // NewTestHugoSite creates a new minimal test site. -func NewTestHugoSite() hugo.Site { +func NewTestHugoSite() page.Site { return testSite{ h: hugo.NewInfo(hugo.EnvironmentProduction), l: langs.NewLanguage("en", newTestConfig()), diff --git a/hugolib/alias.go b/hugolib/alias.go index c44f32dbba1..a3819e6b7c3 100644 --- a/hugolib/alias.go +++ b/hugolib/alias.go @@ -26,6 +26,7 @@ import ( "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/helpers" @@ -55,7 +56,13 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al return aliasHandler{t, l, allowRoot} } -func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) { +type aliasPage struct { + Permalink string + page.Page +} + +// TODO(bep) page isn't permalink == p.Permalink()? +func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) { t := "alias" if isXHTML { t = "alias-xhtml" @@ -75,12 +82,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i } } - data := struct { - Permalink string - Page *Page - }{ + data := aliasPage{ permalink, - page, + p, } buffer := new(bytes.Buffer) @@ -91,11 +95,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i return buffer, nil } -func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) { return s.publishDestAlias(false, path, permalink, outputFormat, p) } -func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) { handler := newAliasHandler(s.Tmpl, s.Log, allowRoot) isXHTML := strings.HasSuffix(path, ".xhtml") @@ -126,19 +130,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo func (a aliasHandler) targetPathAlias(src string) (string, error) { originalAlias := src if len(src) <= 0 { - return "", fmt.Errorf("Alias \"\" is an empty string") + return "", fmt.Errorf("alias \"\" is an empty string") } alias := filepath.Clean(src) components := strings.Split(alias, helpers.FilePathSeparator) if !a.allowRoot && alias == helpers.FilePathSeparator { - return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias) } // Validate against directory traversal if components[0] == ".." 
{ - return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias) } // Handle Windows file and directory naming restrictions @@ -171,7 +175,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) { for _, m := range msgs { a.log.ERROR.Println(m) } - return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias) + return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias) } for _, m := range msgs { a.log.INFO.Println(m) diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go index da1b80b7007..109d01f14ed 100644 --- a/hugolib/alias_test.go +++ b/hugolib/alias_test.go @@ -50,7 +50,7 @@ func TestAlias(t *testing.T) { b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 1) + require.Len(t, b.H.Sites[0].RegularPages(), 1) // the real page b.AssertFileContent("public/page/index.html", "For some moments the old man") diff --git a/hugolib/collections.go b/hugolib/collections.go index cf75d373221..d6abab29dbb 100644 --- a/hugolib/collections.go +++ b/hugolib/collections.go @@ -14,19 +14,13 @@ package hugolib import ( - "fmt" - - "github.com/gohugoio/hugo/resources/resource" - "github.com/gohugoio/hugo/common/collections" + "github.com/gohugoio/hugo/resources/page" ) var ( - _ collections.Grouper = (*Page)(nil) - _ collections.Slicer = (*Page)(nil) - _ collections.Slicer = PageGroup{} - _ collections.Slicer = WeightedPage{} - _ resource.ResourcesConverter = Pages{} + _ collections.Grouper = (*pageState)(nil) + _ collections.Slicer = (*pageState)(nil) ) // collections.Slicer implementations below. We keep these bridge implementations @@ -35,50 +29,8 @@ var ( // Slice is not meant to be used externally. It's a bridge function // for the template functions. See collections.Slice. -func (p *Page) Slice(items interface{}) (interface{}, error) { - return toPages(items) -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p PageGroup) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case PageGroup: - return items, nil - case []interface{}: - groups := make(PagesGroup, len(items)) - for i, v := range items { - g, ok := v.(PageGroup) - if !ok { - return nil, fmt.Errorf("type %T is not a PageGroup", v) - } - groups[i] = g - } - return groups, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p WeightedPage) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case WeightedPages: - return items, nil - case []interface{}: - weighted := make(WeightedPages, len(items)) - for i, v := range items { - g, ok := v.(WeightedPage) - if !ok { - return nil, fmt.Errorf("type %T is not a WeightedPage", v) - } - weighted[i] = g - } - return weighted, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } +func (p *pageState) Slice(items interface{}) (interface{}, error) { + return page.ToPages(items) } // collections.Grouper implementations below @@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) { // Group creates a PageGroup from a key and a Pages object // This method is not meant for external use. 
It got its non-typed arguments to satisfy // a very generic interface in the tpl package. -func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) { - pages, err := toPages(in) +func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) { + pages, err := page.ToPages(in) if err != nil { return nil, err } - return PageGroup{Key: key, Pages: pages}, nil -} - -// ToResources wraps resource.ResourcesConverter -func (pages Pages) ToResources() resource.Resources { - r := make(resource.Resources, len(pages)) - for i, p := range pages { - r[i] = p - } - return r + return page.PageGroup{Key: key, Pages: pages}, nil } diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go index 9cf328a05f6..0cd936aef3e 100644 --- a/hugolib/collections_test.go +++ b/hugolib/collections_test.go @@ -40,7 +40,7 @@ title: "Page" b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", "cool: 2") } @@ -79,12 +79,12 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)", - "pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)", - `weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) + "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)", + "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)", + `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) } func TestAppendFunc(t *testing.T) { @@ -129,11 +129,11 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)", - "appendPages:9:hugolib.Pages:home/page", + "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)", + "appendPages:9:page.Pages:home/page", "appendStrings:[]string:[a b c d e]", "appendStringsSlice:[]string:[a b c c d]", "union:[]string:[a b c d e]", diff --git a/hugolib/config.go b/hugolib/config.go index 6a1de32beec..0c2d2952342 100644 --- a/hugolib/config.go +++ b/hugolib/config.go @@ -240,11 +240,6 @@ func (l configLoader) wrapFileError(err error, filename string) error { return err } -func (l configLoader) newRealBaseFs(path string) afero.Fs { - return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs)) - -} - func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) { sourceFs := l.Fs configDir := l.AbsConfigDir @@ -274,7 +269,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) for _, configDir := range configDirs { err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error { - if fi == nil { + if fi == nil || err != nil { return nil } @@ -616,8 +611,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("removePathAccents", false) v.SetDefault("titleCaseStyle", "AP") v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"}) - v.SetDefault("permalinks", make(PermalinkOverrides, 0)) - v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"}) + 
v.SetDefault("permalinks", make(map[string]string)) + v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"}) v.SetDefault("pygmentsStyle", "monokai") v.SetDefault("pygmentsUseClasses", false) v.SetDefault("pygmentsCodeFences", false) @@ -625,7 +620,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("pygmentsOptions", "") v.SetDefault("disableLiveReload", false) v.SetDefault("pluralizeListTitles", true) - v.SetDefault("preserveTaxonomyNames", false) v.SetDefault("forceSyncStatic", false) v.SetDefault("footnoteAnchorPrefix", "") v.SetDefault("footnoteReturnLinkContents", "") diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go index 6685de4cc61..eed1f3e417e 100644 --- a/hugolib/datafiles_test.go +++ b/hugolib/datafiles_test.go @@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true}) - if !expectBuildError && !reflect.DeepEqual(expected, s.Data) { + if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) { // This disabled code detects the situation described in the WARNING message below. // The situation seems to only occur for TOML data with integer values. // Perhaps the TOML parser returns ints in another type. @@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey } */ - return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data) + return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data()) } return diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go index edada141912..bce88ed0d36 100644 --- a/hugolib/disableKinds_test.go +++ b/hugolib/disableKinds_test.go @@ -18,6 +18,8 @@ import ( "fmt" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/deps" "github.com/spf13/afero" @@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) { func TestDisableKindsSomeDisabled(t *testing.T) { t.Parallel() - doTestDisableKinds(t, KindSection, kind404) + doTestDisableKinds(t, page.KindSection, kind404) } func TestDisableKindsOneDisabled(t *testing.T) { t.Parallel() for _, kind := range allKinds { - if kind == KindPage { + if kind == page.KindPage { // Turning off regular page generation have some side-effects // not handled by the assertions below (no sections), so // skip that for now. 
@@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) { assertDisabledKind(th, func(isDisabled bool) bool { if isDisabled { - return len(s.RegularPages) == 0 + return len(s.RegularPages()) == 0 } - return len(s.RegularPages) > 0 - }, disabled, KindPage, "public/sect/p1/index.html", "Single|P1") + return len(s.RegularPages()) > 0 + }, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindHome) + p := s.getPage(page.KindHome) if isDisabled { return p == nil } return p != nil - }, disabled, KindHome, "public/index.html", "Home") + }, disabled, page.KindHome, "public/index.html", "Home") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindSection, "sect") + p := s.getPage(page.KindSection, "sect") if isDisabled { return p == nil } return p != nil - }, disabled, KindSection, "public/sect/index.html", "Sects") + }, disabled, page.KindSection, "public/sect/index.html", "Sects") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "tags", "tag1") + p := s.getPage(page.KindTaxonomy, "tags", "tag1") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1") + }, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "tags") + p := s.getPage(page.KindTaxonomyTerm, "tags") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags") + }, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "categories") + p := s.getPage(page.KindTaxonomyTerm, "categories") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms") + }, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "categories", "hugo") + p := s.getPage(page.KindTaxonomy, "categories", "hugo") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo") + }, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo") // The below have no page in any collection. assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "") assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap") @@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st if kind == kindRSS && !isDisabled { // If the home page is also disabled, there is not RSS to look for. - if stringSliceContains(KindHome, disabled...) { + if stringSliceContains(page.KindHome, disabled...) 
{ isDisabled = true } } diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go index 3a6220b532f..3ec6947414e 100644 --- a/hugolib/embedded_shortcodes_test.go +++ b/hugolib/embedded_shortcodes_test.go @@ -20,6 +20,8 @@ import ( "strings" "testing" + "github.com/spf13/cast" + "path/filepath" "github.com/gohugoio/hugo/deps" @@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) - require.Len(t, s.RegularPages, 1) + require.Len(t, s.RegularPages(), 1) - output := string(s.RegularPages[0].content()) + content, err := s.RegularPages()[0].Content() + require.NoError(t, err) + output := cast.ToString(content) if !strings.Contains(output, expected) { t.Errorf("Got\n%q\nExpected\n%q", output, expected) diff --git a/hugolib/gitinfo.go b/hugolib/gitinfo.go index d356fcf075e..bd50f10b89a 100644 --- a/hugolib/gitinfo.go +++ b/hugolib/gitinfo.go @@ -19,6 +19,7 @@ import ( "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/resources/page" ) type gitInfo struct { @@ -26,15 +27,12 @@ type gitInfo struct { repo *gitmap.GitRepo } -func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) { - if g == nil { - return nil, false - } - - name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir) +func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo { + name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir) name = strings.TrimPrefix(name, "/") - return g.repo.Files[name], true + return g.repo.Files[name] + } func newGitInfo(cfg config.Provider) (*gitInfo, error) { diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 9ce1c438e75..db939780dc0 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -14,14 +14,23 @@ package hugolib import ( - "errors" "io" "path/filepath" "sort" "strings" "sync" + "github.com/gohugoio/hugo/parser/metadecoders" + + "github.com/gohugoio/hugo/hugofs" + + "github.com/pkg/errors" + + "github.com/gohugoio/hugo/source" + + "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/spf13/afero" "github.com/gohugoio/hugo/publisher" @@ -30,8 +39,10 @@ import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/langs" + "github.com/gohugoio/hugo/lazy" "github.com/gohugoio/hugo/i18n" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" ) @@ -50,15 +61,85 @@ type HugoSites struct { *deps.Deps + gitInfo *gitInfo + + // As loaded from the /data dirs + data map[string]interface{} + // Keeps track of bundle directories and symlinks to enable partial rebuilding. ContentChanges *contentChangeMap - // If enabled, keeps a revision map for all content. - gitInfo *gitInfo + init *hugoSitesInit + + *fatalErrorHandler +} + +type fatalErrorHandler struct { + mu sync.Mutex + + h *HugoSites + + done bool + donec chan bool // will be closed when done +} + +func (f *fatalErrorHandler) FatalError(err error) { + f.mu.Lock() + defer f.mu.Unlock() + if !f.done { + f.done = true + close(f.donec) + } + + // TODO(bep) page error context + f.h.DistinctErrorLog.Println(err) + +} + +func (f *fatalErrorHandler) Done() <-chan bool { + return f.donec +} + +type hugoSitesInit struct { + // Loads the data from all of the /data folders. + data *lazy.Init + + // Loads the Git info for all the pages if enabled. + gitInfo *lazy.Init + + // Maps page translations. 
+ translations *lazy.Init +} + +func (h *hugoSitesInit) Reset() { + h.data.Reset() + h.gitInfo.Reset() + h.translations.Reset() } -func (h *HugoSites) siteInfos() SiteInfos { - infos := make(SiteInfos, len(h.Sites)) +func (h *HugoSites) Data() map[string]interface{} { + if _, err := h.init.data.Do(); err != nil { + // TODO(bep) page use SendError for these + h.Log.ERROR.Printf("Failed to load data: %s", err) + return nil + } + return h.data +} + +func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) { + if _, err := h.init.gitInfo.Do(); err != nil { + return nil, err + } + + if h.gitInfo == nil { + return nil, nil + } + + return h.gitInfo.forPage(p), nil +} + +func (h *HugoSites) siteInfos() page.Sites { + infos := make(page.Sites, len(h.Sites)) for i, site := range h.Sites { infos[i] = &site.Info } @@ -106,7 +187,7 @@ func (h *HugoSites) IsMultihost() bool { func (h *HugoSites) LanguageSet() map[string]bool { set := make(map[string]bool) for _, s := range h.Sites { - set[s.Language.Lang] = true + set[s.language.Lang] = true } return set } @@ -129,14 +210,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) { func (h *HugoSites) langSite() map[string]*Site { m := make(map[string]*Site) for _, s := range h.Sites { - m[s.Language.Lang] = s + m[s.language.Lang] = s } return m } // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. -func (h *HugoSites) GetContentPage(filename string) *Page { +func (h *HugoSites) GetContentPage(filename string) page.Page { for _, s := range h.Sites { pos := s.rawAllPages.findPagePosByFilename(filename) if pos == -1 { @@ -178,10 +259,41 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { running: cfg.Running, multilingual: langConfig, multihost: cfg.Cfg.GetBool("multihost"), - Sites: sites} + Sites: sites, + init: &hugoSitesInit{ + data: lazy.New(), + gitInfo: lazy.New(), + translations: lazy.New(), + }, + } + + // TODO(bep) page rebuilds + h.fatalErrorHandler = &fatalErrorHandler{ + h: h, + donec: make(chan bool), + } + + h.init.data.Add(func() (interface{}, error) { + err := h.loadData(h.PathSpec.BaseFs.Data.Fs) + return err, nil + }) + + h.init.translations.Add(func() (interface{}, error) { + if len(h.Sites) > 1 { + allTranslations := pagesToTranslationsMap(h.Sites) + assignTranslationsToPages(allTranslations, h.Sites) + } + + return nil, nil + }) + + h.init.gitInfo.Add(func() (interface{}, error) { + err := h.loadGitInfo() + return nil, err + }) for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(cfg, sites...); err != nil { @@ -197,14 +309,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { h.ContentChanges = contentChangeTracker } - if err := h.initGitInfo(); err != nil { - return nil, err - } - return h, nil } -func (h *HugoSites) initGitInfo() error { +func (h *HugoSites) loadGitInfo() error { if h.Cfg.GetBool("enableGitInfo") { gi, err := newGitInfo(h.Cfg) if err != nil { @@ -247,16 +355,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error { d.Site = &s.Info - siteConfig, err := loadSiteConfig(s.Language) + siteConfig, err := loadSiteConfig(s.language) if err != nil { return err } - s.siteConfig = siteConfig - s.siteRefLinker, err = newSiteRefLinker(s.Language, s) + s.siteConfigConfig = siteConfig + s.siteRefLinker, err = newSiteRefLinker(s.language, s) return err } - cfg.Language = s.Language + cfg.Language = s.language cfg.MediaTypes = s.mediaTypesConfig cfg.OutputFormats = s.outputFormatsConfig @@ 
-387,7 +495,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error { h.Sites = sites for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(depsCfg, sites...); err != nil { @@ -435,7 +543,7 @@ type BuildCfg struct { // Note that a page does not have to have a content page / file. // For regular builds, this will allways return true. // TODO(bep) rename/work this. -func (cfg *BuildCfg) shouldRender(p *Page) bool { +func (cfg *BuildCfg) shouldRender(p *pageState) bool { if p.forceRender { p.forceRender = false return true @@ -445,15 +553,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool { return true } - if cfg.RecentlyVisited[p.RelPermalink()] { - if cfg.PartialReRender { - _ = p.initMainOutputFormat() - } - return true - } - - if cfg.whatChanged != nil && p.File != nil { - return cfg.whatChanged.files[p.File.Filename()] + if cfg.whatChanged != nil && p.File() != nil { + return cfg.whatChanged.files[p.File().Filename()] } return false @@ -477,100 +578,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error { return nil } - // TODO(bep) DRY - sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap")) - s := h.Sites[0] smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"} return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", - sitemapDefault.Filename, h.toSiteInfos(), smLayouts...) -} - -func (h *HugoSites) assignMissingTranslations() error { - - // This looks heavy, but it should be a small number of nodes by now. - allPages := h.findAllPagesByKindNotIn(KindPage) - for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { - nodes := h.findPagesByKindIn(nodeType, allPages) - - // Assign translations - for _, t1 := range nodes { - for _, t2 := range nodes { - if t1.isNewTranslation(t2) { - t1.translations = append(t1.translations, t2) - } - } - } - } - - // Now we can sort the translations. - for _, p := range allPages { - if len(p.translations) > 0 { - pageBy(languagePageSort).Sort(p.translations) - } - } - return nil - + s.siteConfigHolder.sitemap.Filename, h.toSiteInfos(), smLayouts...) } // createMissingPages creates home page, taxonomies etc. that isnt't created as an // effect of having a content file. func (h *HugoSites) createMissingPages() error { - var newPages Pages + var newPages pageStatePages for _, s := range h.Sites { - if s.isEnabled(KindHome) { + if s.isEnabled(page.KindHome) { // home pages - home := s.findPagesByKind(KindHome) - if len(home) > 1 { + homes := s.findWorkPagesByKind(page.KindHome) + if len(homes) > 1 { panic("Too many homes") } - if len(home) == 0 { - n := s.newHomePage() - s.Pages = append(s.Pages, n) - newPages = append(newPages, n) + var home *pageState + if len(homes) == 0 { + home = s.newPage(page.KindHome) + s.workAllPages = append(s.workAllPages, home) + newPages = append(newPages, home) + } else { + home = homes[0] } + + s.home = home } // Will create content-less root sections. newSections := s.assembleSections() - s.Pages = append(s.Pages, newSections...) + s.workAllPages = append(s.workAllPages, newSections...) newPages = append(newPages, newSections...) 
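
The hugoSitesInit wiring earlier in this hunk (h.init.data, h.init.gitInfo, h.init.translations, each a lazy.Init that is re-armed by Reset on a full rebuild) means data files, Git metadata and translations are computed at most once per build and only when first requested. The stand-alone sketch below shows that reset-able once-only pattern with plain standard-library Go; lazyInit, newLazyInit and their fields are illustrative names, not the API of Hugo's lazy package.

package main

import (
	"fmt"
	"sync"
)

// lazyInit is an illustrative reset-able once-only initializer, loosely
// modelled on the Do/Reset semantics used by hugoSitesInit above.
type lazyInit struct {
	mu   sync.Mutex
	done bool
	out  interface{}
	err  error
	fn   func() (interface{}, error)
}

func newLazyInit(fn func() (interface{}, error)) *lazyInit {
	return &lazyInit{fn: fn}
}

// Do runs fn the first time it is called and caches the result until Reset.
func (l *lazyInit) Do() (interface{}, error) {
	l.mu.Lock()
	defer l.mu.Unlock()
	if !l.done {
		l.out, l.err = l.fn()
		l.done = true
	}
	return l.out, l.err
}

// Reset re-arms the initializer, e.g. on ResetState or a config change.
func (l *lazyInit) Reset() {
	l.mu.Lock()
	defer l.mu.Unlock()
	l.done = false
	l.out, l.err = nil, nil
}

func main() {
	data := newLazyInit(func() (interface{}, error) {
		fmt.Println("loading data...")
		return map[string]interface{}{"greeting": "hello"}, nil
	})
	data.Do()   // loads
	data.Do()   // cached, no reload
	data.Reset()
	data.Do()   // loads again after reset
}
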
+ taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm) + taxonomyEnabled := s.isEnabled(page.KindTaxonomy) + // taxonomy list and terms pages - taxonomies := s.Language.GetStringMapString("taxonomies") + taxonomies := s.Language().GetStringMapString("taxonomies") if len(taxonomies) > 0 { - taxonomyPages := s.findPagesByKind(KindTaxonomy) - taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm) + taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy) + taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm) + + // Make them navigable from WeightedPage etc. + for _, p := range taxonomyPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, p := range taxonomyTermsPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, plural := range taxonomies { - if s.isEnabled(KindTaxonomyTerm) { + if taxonomyTermEnabled { foundTaxonomyTermsPage := false for _, p := range taxonomyTermsPages { - if p.sectionsPath() == plural { + if p.SectionsPath() == plural { foundTaxonomyTermsPage = true break } } if !foundTaxonomyTermsPage { - n := s.newTaxonomyTermsPage(plural) - s.Pages = append(s.Pages, n) + n := s.newPage(page.KindTaxonomyTerm, plural) + n.getTaxonomyNodeInfo().TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } - if s.isEnabled(KindTaxonomy) { - for key := range s.Taxonomies[plural] { + if taxonomyEnabled { + for termKey := range s.Taxonomies[plural] { + foundTaxonomyPage := false - origKey := key - if s.Info.preserveTaxonomyNames { - key = s.PathSpec.MakePathSanitized(key) - } for _, p := range taxonomyPages { - sectionsPath := p.sectionsPath() + sectionsPath := p.SectionsPath() if !strings.HasPrefix(sectionsPath, plural) { continue @@ -579,20 +665,21 @@ func (h *HugoSites) createMissingPages() error { singularKey := strings.TrimPrefix(sectionsPath, plural) singularKey = strings.TrimPrefix(singularKey, "/") - // Some people may have /authors/MaxMustermann etc. as paths. - // p.sections contains the raw values from the file system. - // See https://github.com/gohugoio/hugo/issues/4238 - singularKey = s.PathSpec.MakePathSanitized(singularKey) - - if singularKey == key { + if singularKey == termKey { foundTaxonomyPage = true break } } if !foundTaxonomyPage { - n := s.newTaxonomyPage(plural, origKey) - s.Pages = append(s.Pages, n) + info := s.taxonomyNodes.Get(plural, termKey) + if info == nil { + panic("no info found") + } + + n := s.newTaxonomyPage(info.term, info.plural, info.termKey) + info.TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } @@ -601,24 +688,6 @@ func (h *HugoSites) createMissingPages() error { } } - if len(newPages) > 0 { - // This resorting is unfortunate, but it also needs to be sorted - // when sections are created. - first := h.Sites[0] - - first.AllPages = append(first.AllPages, newPages...) 
- - first.AllPages.sort() - - for _, s := range h.Sites { - s.Pages.sort() - } - - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].AllPages = first.AllPages - } - } - return nil } @@ -628,61 +697,58 @@ func (h *HugoSites) removePageByFilename(filename string) { } } -func (h *HugoSites) setupTranslations() { +func (h *HugoSites) createPageCollections() error { for _, s := range h.Sites { for _, p := range s.rawAllPages { - if p.Kind == kindUnknown { - p.Kind = p.kindFromSections() - } - - if !p.s.isEnabled(p.Kind) { + if !s.isEnabled(p.Kind()) { continue } - shouldBuild := p.shouldBuild() - s.updateBuildStats(p) + shouldBuild := s.shouldBuild(p) + s.buildStats.update(p) if shouldBuild { - if p.headless { + if p.m.headless { s.headlessPages = append(s.headlessPages, p) } else { - s.Pages = append(s.Pages, p) + s.workAllPages = append(s.workAllPages, p) } } } } - allPages := make(Pages, 0) + allPages := newLazyPagesFactory(func() page.Pages { + var pages page.Pages + for _, s := range h.Sites { + pages = append(pages, s.Pages()...) + } - for _, s := range h.Sites { - allPages = append(allPages, s.Pages...) - } + page.SortByDefault(pages) - allPages.sort() + return pages + }) - for _, s := range h.Sites { - s.AllPages = allPages - } + allRegularPages := newLazyPagesFactory(func() page.Pages { + return h.findPagesByKindIn(page.KindPage, allPages.get()) + }) - // Pull over the collections from the master site - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].Data = h.Sites[0].Data + for _, s := range h.Sites { + s.PageCollections.allPages = allPages + s.PageCollections.allRegularPages = allRegularPages } - if len(h.Sites) > 1 { - allTranslations := pagesToTranslationsMap(allPages) - assignTranslationsToPages(allTranslations, allPages) - } + return nil } -func (s *Site) preparePagesForRender(start bool) error { - for _, p := range s.Pages { - if err := p.prepareForRender(start); err != nil { +func (s *Site) preparePagesForRender(idx int, start bool) error { + + for _, p := range s.workAllPages { + if err := p.initOutputFormat(idx, start); err != nil { return err } } for _, p := range s.headlessPages { - if err := p.prepareForRender(start); err != nil { + if err := p.initOutputFormat(idx, start); err != nil { return err } } @@ -691,62 +757,141 @@ func (s *Site) preparePagesForRender(start bool) error { } // Pages returns all pages for all sites. 
-func (h *HugoSites) Pages() Pages { - return h.Sites[0].AllPages +func (h *HugoSites) Pages() page.Pages { + return h.Sites[0].AllPages() } -func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) { - if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 { - p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName()) - err := p.shortcodeState.executeShortcodesForDelta(p) +func (h *HugoSites) loadData(fs afero.Fs) (err error) { + spec := source.NewSourceSpec(h.PathSpec, fs) + fileSystem := spec.NewFilesystem("") + h.data = make(map[string]interface{}) + for _, r := range fileSystem.Files() { + if err := h.handleDataFile(r); err != nil { + return err + } + } + + return +} - if err != nil { +func (h *HugoSites) handleDataFile(r source.ReadableFile) error { + var current map[string]interface{} - return rawContentCopy, err + f, err := r.Open() + if err != nil { + return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName()) + } + defer f.Close() + + // Crawl in data tree to insert data + current = h.data + keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator) + // The first path element is the virtual folder (typically theme name), which is + // not part of the key. + if len(keyParts) > 1 { + for _, key := range keyParts[1:] { + if key != "" { + if _, ok := current[key]; !ok { + current[key] = make(map[string]interface{}) + } + current = current[key].(map[string]interface{}) + } } + } - rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes) + data, err := h.readData(r) + if err != nil { + return h.errWithFileContext(err, r) + } - if err != nil { - p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error()) + if data == nil { + return nil + } + + // filepath.Walk walks the files in lexical order, '/' comes before '.' + // this warning could happen if + // 1. A theme uses the same key; the main data folder wins + // 2. A sub folder uses the same key: the sub folder wins + higherPrecedentData := current[r.BaseFileName()] + + switch data.(type) { + case nil: + // hear the crickets? 
+ + case map[string]interface{}: + + switch higherPrecedentData.(type) { + case nil: + current[r.BaseFileName()] = data + case map[string]interface{}: + // merge maps: insert entries from data for keys that + // don't already exist in higherPrecedentData + higherPrecedentMap := higherPrecedentData.(map[string]interface{}) + for key, value := range data.(map[string]interface{}) { + if _, exists := higherPrecedentMap[key]; exists { + h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path()) + } else { + higherPrecedentMap[key] = value + } + } + default: + // can't merge: higherPrecedentData is not a map + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) + } + + case []interface{}: + if higherPrecedentData == nil { + current[r.BaseFileName()] = data + } else { + // we don't merge array data + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) } + + default: + h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName()) } - return rawContentCopy, nil + return nil } -func (s *Site) updateBuildStats(page *Page) { - if page.IsDraft() { - s.draftCount++ +func (h *HugoSites) errWithFileContext(err error, f source.File) error { + rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo) + if !ok { + return err } - if page.IsFuture() { - s.futureCount++ - } + realFilename := rfi.RealFilename() - if page.IsExpired() { - s.expiredCount++ - } -} + err, _ = herrors.WithFileContextForFile( + err, + realFilename, + realFilename, + h.SourceSpec.Fs.Source, + herrors.SimpleLineMatcher) -func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindNotIn(kind, inPages) + return err } -func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindIn(kind, inPages) -} +func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) { + file, err := f.Open() + if err != nil { + return nil, errors.Wrap(err, "readData: failed to open data file") + } + defer file.Close() + content := helpers.ReaderToBytes(file) -func (h *HugoSites) findAllPagesByKind(kind string) Pages { - return h.findPagesByKindIn(kind, h.Sites[0].AllPages) + format := metadecoders.FormatFromString(f.Extension()) + return metadecoders.Default.Unmarshal(content, format) } -func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages { - return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages) +func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { + return h.Sites[0].findPagesByKindIn(kind, inPages) } -func (h *HugoSites) findPagesByShortcode(shortcode string) Pages { - var pages Pages +func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages { + var pages page.Pages for _, s := range h.Sites { pages = append(pages, s.findPagesByShortcode(shortcode)...) 
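
handleDataFile above does two jobs: it walks the data file's directory segments to find or create the right nested node in h.data, then merges the decoded file into that node, letting entries already in the tree (higher precedence, e.g. project data over theme data) win and logging a warning on conflicts. A self-contained sketch of that place-and-merge logic follows; placeNode and mergeData are simplified stand-ins, not the actual Hugo functions.

package main

import (
	"fmt"
	"strings"
)

// placeNode walks directory segments (e.g. "en/authors") and returns the
// nested map where a data file in that directory should be inserted,
// creating intermediate maps as needed.
func placeNode(root map[string]interface{}, dir string) map[string]interface{} {
	current := root
	for _, key := range strings.Split(dir, "/") {
		if key == "" {
			continue
		}
		if _, ok := current[key]; !ok {
			current[key] = make(map[string]interface{})
		}
		current = current[key].(map[string]interface{})
	}
	return current
}

// mergeData inserts newData under name, but entries already present in the
// tree keep precedence, mirroring the warnings emitted by handleDataFile.
func mergeData(node map[string]interface{}, name string, newData map[string]interface{}) {
	existing, ok := node[name].(map[string]interface{})
	if !ok {
		node[name] = newData
		return
	}
	for k, v := range newData {
		if _, exists := existing[k]; exists {
			fmt.Printf("key %q already set with higher precedence, skipping\n", k)
			continue
		}
		existing[k] = v
	}
}

func main() {
	data := make(map[string]interface{})
	node := placeNode(data, "en/authors")
	mergeData(node, "people", map[string]interface{}{"alice": "project"})
	// A theme file with the same key arrives later and loses on conflicts.
	mergeData(node, "people", map[string]interface{}{"alice": "theme", "bob": "theme"})
	fmt.Println(data)
}
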
} diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index ec5070fa814..7fae0dd7288 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -16,6 +16,7 @@ package hugolib import ( "bytes" "fmt" + "sort" "errors" @@ -71,7 +72,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return err } } else { - if err := h.init(conf); err != nil { + if err := h.initSites(conf); err != nil { return err } } @@ -132,7 +133,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { // Build lifecycle methods below. // The order listed matches the order of execution. -func (h *HugoSites) init(config *BuildCfg) error { +func (h *HugoSites) initSites(config *BuildCfg) error { for _, s := range h.Sites { if s.PageCollections == nil { @@ -144,6 +145,11 @@ func (h *HugoSites) init(config *BuildCfg) error { h.reset() } + if config.ResetState || config.NewConfig != nil { + // TODO(bep) page consolidate + h.init.Reset() + } + if config.NewConfig != nil { if err := h.createSitesFromConfig(config.NewConfig); err != nil { return err @@ -155,22 +161,15 @@ func (h *HugoSites) init(config *BuildCfg) error { func (h *HugoSites) initRebuild(config *BuildCfg) error { if config.NewConfig != nil { - return errors.New("Rebuild does not support 'NewConfig'.") + return errors.New("rebuild does not support 'NewConfig'.") } if config.ResetState { - return errors.New("Rebuild does not support 'ResetState'.") + return errors.New("rebuild does not support 'ResetState'.") } if !h.running { - return errors.New("Rebuild called when not in watch mode") - } - - if config.whatChanged.source { - // This is for the non-renderable content pages (rarely used, I guess). - // We could maybe detect if this is really needed, but it should be - // pretty fast. - h.TemplateHandler().RebuildClone() + return errors.New("rebuild called when not in watch mode") } for _, s := range h.Sites { @@ -203,14 +202,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error { } func (h *HugoSites) assemble(config *BuildCfg) error { - if config.whatChanged.source { - for _, s := range h.Sites { - s.createTaxonomiesEntries() - } - } - - // TODO(bep) we could probably wait and do this in one go later - h.setupTranslations() if len(h.Sites) > 1 { // The first is initialized during process; initialize the rest @@ -221,47 +212,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } } + if err := h.createPageCollections(); err != nil { + return err + } + if config.whatChanged.source { for _, s := range h.Sites { - if err := s.buildSiteMeta(); err != nil { + if err := s.assembleTaxonomies(); err != nil { return err } } } + // Create pagexs for the section pages etc. without content file. 
if err := h.createMissingPages(); err != nil { return err } for _, s := range h.Sites { - for _, pages := range []Pages{s.Pages, s.headlessPages} { - for _, p := range pages { - // May have been set in front matter - if len(p.outputFormats) == 0 { - p.outputFormats = s.outputFormats[p.Kind] - } - - if p.headless { - // headless = 1 output format only - p.outputFormats = p.outputFormats[:1] - } - for _, r := range p.Resources.ByType(pageResourceType) { - r.(*Page).outputFormats = p.outputFormats - } - - if err := p.initPaths(); err != nil { - return err - } - - } - } - s.assembleMenus() - s.refreshPageCaches() s.setupSitePages() - } - - if err := h.assignMissingTranslations(); err != nil { - return err + sort.Stable(s.workAllPages) } return nil @@ -276,35 +246,47 @@ func (h *HugoSites) render(config *BuildCfg) error { } for _, s := range h.Sites { - for i, rf := range s.renderFormats { - for _, s2 := range h.Sites { - // We render site by site, but since the content is lazily rendered - // and a site can "borrow" content from other sites, every site - // needs this set. - s2.rc = &siteRenderingContext{Format: rf} - - isRenderingSite := s == s2 - - if !config.PartialReRender { - if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil { - return err + for outIdx, renderFormat := range s.renderFormats { + select { + case <-h.Done(): + return nil + default: + // For the non-renderable pages, we use the content iself as + // template and we may have to re-parse and execute it for + // each output format. + h.TemplateHandler().RebuildClone() + + for _, s2 := range h.Sites { + // We render site by site, but since the content is lazily rendered + // and a site can "borrow" content from other sites, every site + // needs this set. + s2.rc = &siteRenderingContext{Format: renderFormat} + + isRenderingSite := s == s2 + + if !config.PartialReRender { + if err := s2.preparePagesForRender(outIdx, isRenderingSite && outIdx == 0); err != nil { + return err + } } - } - } + } - if !config.SkipRender { - if config.PartialReRender { - if err := s.renderPages(config); err != nil { - return err - } - } else { - if err := s.render(config, i); err != nil { - return err + if !config.SkipRender { + if config.PartialReRender { + if err := s.renderPages(config, outIdx); err != nil { + return err + } + } else { + if err := s.render(config, outIdx); err != nil { + return err + } } } } + } + } if !config.SkipRender { diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go index fce6ec91527..0cfce5d6b9a 100644 --- a/hugolib/hugo_sites_build_errors_test.go +++ b/hugolib/hugo_sites_build_errors_test.go @@ -3,9 +3,13 @@ package hugolib import ( "fmt" "path/filepath" + "regexp" "runtime" "strings" "testing" + "time" + + "github.com/fortytw2/leaktest" "github.com/gohugoio/hugo/common/herrors" "github.com/stretchr/testify/require" @@ -37,7 +41,6 @@ func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) { func TestSiteBuildErrors(t *testing.T) { t.Parallel() - assert := require.New(t) const ( yamlcontent = "yamlcontent" @@ -87,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertCreateError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(1, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(1, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) 
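
The rewritten render loop in hugo_sites_build.go above walks output formats by index, checks a done channel before each pass so a fatal error can stop the build early, and prepares every site for each format because lazily rendered content can be borrowed across sites. A reduced sketch of that control flow, with placeholder site and output format types standing in for Hugo's:

package main

import "fmt"

type outputFormat struct{ name string }

type site struct {
	name          string
	renderFormats []outputFormat
}

func (s *site) prepare(outIdx int, start bool) error {
	fmt.Printf("prepare %s for format #%d (start=%v)\n", s.name, outIdx, start)
	return nil
}

func (s *site) render(outIdx int) error {
	fmt.Printf("render %s, format %s\n", s.name, s.renderFormats[outIdx].name)
	return nil
}

// renderAll mirrors the loop structure above: site by site, format by format,
// bailing out if done is closed, and preparing all sites before each pass.
func renderAll(sites []*site, done <-chan bool) error {
	for _, s := range sites {
		for outIdx := range s.renderFormats {
			select {
			case <-done:
				return nil
			default:
				for _, s2 := range sites {
					if err := s2.prepare(outIdx, s2 == s && outIdx == 0); err != nil {
						return err
					}
				}
				if err := s.render(outIdx); err != nil {
					return err
				}
			}
		}
	}
	return nil
}

func main() {
	sites := []*site{
		{name: "en", renderFormats: []outputFormat{{"HTML"}, {"RSS"}}},
		{name: "fr", renderFormats: []outputFormat{{"HTML"}}},
	}
	done := make(chan bool) // closed by a fatal error handler in the real build
	_ = renderAll(sites, done)
}
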
a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error()) }, @@ -102,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -117,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -142,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(7, fe.Position().LineNumber) - assert.Equal("md", fe.ChromaLexer) + a.assert.Equal(7, fe.Position().LineNumber) + a.assert.Equal("md", fe.ChromaLexer) // Make sure that it contains both the content file and template a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error()) a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error()) @@ -157,9 +160,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(7, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("md", fe.ChromaLexer) + a.assert.Equal(7, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("md", fe.ChromaLexer) a.assertErrorMessage("\"content/myyaml.md:7:14\": failed to extract shortcode: template for shortcode \"nono\" not found", fe.Error()) }, }, @@ -181,8 +184,8 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(6, fe.Position().LineNumber) - assert.Equal("toml", fe.ErrorContext.ChromaLexer) + a.assert.Equal(6, fe.Position().LineNumber) + a.assert.Equal("toml", fe.ErrorContext.ChromaLexer) }, }, @@ -195,8 +198,8 @@ func TestSiteBuildErrors(t *testing.T) { assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(3, fe.Position().LineNumber) - assert.Equal("json", fe.ErrorContext.ChromaLexer) + a.assert.Equal(3, fe.Position().LineNumber) + a.assert.Equal("json", fe.ErrorContext.ChromaLexer) }, }, @@ -209,42 +212,43 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { - assert.Error(err) + a.assert.Error(err) // This is fixed in latest Go source - if strings.Contains(runtime.Version(), "devel") { + if regexp.MustCompile("devel|12").MatchString(runtime.Version()) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(21, 
fe.Position().ColumnNumber) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(21, fe.Position().ColumnNumber) } else { - assert.Contains(err.Error(), `execute of template failed: panic in Execute`) + a.assert.Contains(err.Error(), `execute of template failed: panic in Execute`) } }, }, } for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert := require.New(t) + errorAsserter := testSiteBuildErrorAsserter{ + assert: assert, + name: test.name, + } - errorAsserter := testSiteBuildErrorAsserter{ - assert: assert, - name: test.name, - } + b := newTestSitesBuilder(t).WithSimpleConfigFile() - b := newTestSitesBuilder(t).WithSimpleConfigFile() + f := func(fileType, content string) string { + if fileType != test.fileType { + return content + } + return test.fileFixer(content) - f := func(fileType, content string) string { - if fileType != test.fileType { - return content } - return test.fileFixer(content) - } - - b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1 + b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1 SHORTCODE L2 SHORTCODE L3: SHORTCODE L4: {{ .Page.Title }} `)) - b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1 + b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1 BASEOF L2 BASEOF L3 BASEOF L4{{ if .Title }}{{ end }} @@ -252,7 +256,7 @@ BASEOF L4{{ if .Title }}{{ end }} BASEOF L6 `)) - b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }} + b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }} SINGLE L2: SINGLE L3: SINGLE L4: @@ -260,7 +264,7 @@ SINGLE L5: {{ .Title }} {{ .Content }} {{ end }} `)) - b.WithContent("myyaml.md", f(yamlcontent, `--- + b.WithContent("myyaml.md", f(yamlcontent, `--- title: "The YAML" --- @@ -274,7 +278,7 @@ The end. `)) - b.WithContent("mytoml.md", f(tomlcontent, `+++ + b.WithContent("mytoml.md", f(tomlcontent, `+++ title = "The TOML" p1 = "v" p2 = "v" @@ -287,7 +291,7 @@ Some content. `)) - b.WithContent("myjson.md", f(jsoncontent, `{ + b.WithContent("myjson.md", f(jsoncontent, `{ "title": "This is a title", "description": "This is a description." } @@ -297,26 +301,30 @@ Some content. 
`)) - createErr := b.CreateSitesE() - if test.assertCreateError != nil { - test.assertCreateError(errorAsserter, createErr) - } else { - assert.NoError(createErr) - } - - if createErr == nil { - buildErr := b.BuildE(BuildCfg{}) - if test.assertBuildError != nil { - test.assertBuildError(errorAsserter, buildErr) + createErr := b.CreateSitesE() + if test.assertCreateError != nil { + test.assertCreateError(errorAsserter, createErr) } else { - assert.NoError(buildErr) + assert.NoError(createErr) } - } + + if createErr == nil { + buildErr := b.BuildE(BuildCfg{}) + if test.assertBuildError != nil { + test.assertBuildError(errorAsserter, buildErr) + } else { + assert.NoError(buildErr) + } + } + }) } } // https://github.com/gohugoio/hugo/issues/5375 func TestSiteBuildTimeout(t *testing.T) { + if !isCI() { + defer leaktest.CheckTimeout(t, 10*time.Second)() + } b := newTestSitesBuilder(t) b.WithConfigFile("toml", ` @@ -341,6 +349,6 @@ title: "A page" } - b.CreateSites().Build(BuildCfg{}) + b.CreateSites().BuildFail(BuildCfg{}) } diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index 83b96b7f4e7..cbf10def498 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -1,16 +1,16 @@ package hugolib import ( - "bytes" "fmt" "strings" "testing" - "html/template" "os" "path/filepath" "time" + "github.com/gohugoio/hugo/resources/page" + "github.com/fortytw2/leaktest" "github.com/fsnotify/fsnotify" "github.com/gohugoio/hugo/helpers" @@ -66,8 +66,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true)) - doc1en := enSite.RegularPages[0] - doc1fr := frSite.RegularPages[0] + doc1en := enSite.RegularPages()[0] + doc1fr := frSite.RegularPages()[0] enPerm := doc1en.Permalink() enRelPerm := doc1en.RelPermalink() @@ -100,7 +100,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { // Check list pages b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour") b.AssertFileContent("public/en/sect/index.html", "List", "Hello") - b.AssertFileContent(pathMod("public/fr/plaques/frtag1/index.html"), "Taxonomy List", "Bonjour") + b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour") b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello") // Check sitemaps @@ -126,8 +126,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) { pathMod(``, + `TOC2: