Simplify page tree logic

This is preparation for #6041.

For historic reasons, the code for building the section tree and the taxonomies was very much separate. This worked, but it made the code hard to extend and maintain, and possibly not as fast as it could be. This simplification also introduces 3 slightly breaking changes, which I suspect most people will be pleased about; see the referenced issues.

This commit also switches the radix tree dependency to a mutable implementation: github.com/armon/go-radix.

Fixes #6154
Fixes #6153
Fixes #6152
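For context on that dependency switch: github.com/armon/go-radix mutates the tree in place, so the Txn()/Commit() transaction step that go-immutable-radix required (visible in the contentChangeMap changes later in this diff) goes away, and keys become plain strings. A minimal sketch of the go-radix API as this commit uses it — the keys below are illustrative, not taken from the diff:

package main

import (
    "fmt"

    radix "github.com/armon/go-radix"
)

func main() {
    r := radix.New()

    // Insert takes string keys directly; no []byte conversion or transaction.
    r.Insert("/content/post/bundle/", true)

    // LongestPrefix resolves the closest containing key, e.g. the bundle
    // directory that owns a content file.
    if key, _, found := r.LongestPrefix("/content/post/bundle/index.md"); found {
        fmt.Println("owning bundle:", key)
        r.Delete(key) // delete directly on the live tree
    }

    // Walk visits entries in key order; returning false continues the walk.
    r.Walk(func(key string, v interface{}) bool {
        fmt.Println(key, v)
        return false
    })
}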
@@ -14,15 +14,13 @@
package hugolib

import (
    "fmt"
    "io"
    "path"
    "path/filepath"
    "sort"
    "strings"
    "sync"

    radix "github.com/hashicorp/go-immutable-radix"
    radix "github.com/armon/go-radix"

    "github.com/gohugoio/hugo/output"
    "github.com/gohugoio/hugo/parser/metadecoders"

@@ -623,118 +621,6 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
        s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
}

// createMissingPages creates the home page, taxonomies etc. that aren't created as an
// effect of having a content file.
func (h *HugoSites) createMissingPages() error {

    for _, s := range h.Sites {
        if s.isEnabled(page.KindHome) {
            // home pages
            homes := s.findWorkPagesByKind(page.KindHome)
            if len(homes) > 1 {
                panic("Too many homes")
            }
            var home *pageState
            if len(homes) == 0 {
                home = s.newPage(page.KindHome)
                s.workAllPages = append(s.workAllPages, home)
            } else {
                home = homes[0]
            }

            s.home = home
        }

        // Will create content-less root sections.
        newSections := s.assembleSections()
        s.workAllPages = append(s.workAllPages, newSections...)

        taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
        taxonomyEnabled := s.isEnabled(page.KindTaxonomy)

        // taxonomy list and terms pages
        taxonomies := s.Language().GetStringMapString("taxonomies")
        if len(taxonomies) > 0 {
            taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
            taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)

            // Make them navigable from WeightedPage etc.
            for _, p := range taxonomyPages {
                ni := p.getTaxonomyNodeInfo()
                if ni == nil {
                    // This can be nil for taxonomies, e.g. an author,
                    // with a content file, but no actual usage.
                    // Create one.
                    sections := p.SectionsEntries()
                    if len(sections) < 2 {
                        // Invalid state
                        panic(fmt.Sprintf("invalid taxonomy state for %q with sections %v", p.pathOrTitle(), sections))
                    }
                    ni = p.s.taxonomyNodes.GetOrAdd(sections[0], path.Join(sections[1:]...))
                }
                ni.TransferValues(p)
            }
            for _, p := range taxonomyTermsPages {
                p.getTaxonomyNodeInfo().TransferValues(p)
            }

            for _, plural := range taxonomies {
                if taxonomyTermEnabled {
                    foundTaxonomyTermsPage := false
                    for _, p := range taxonomyTermsPages {
                        if p.SectionsPath() == plural {
                            foundTaxonomyTermsPage = true
                            break
                        }
                    }

                    if !foundTaxonomyTermsPage {
                        n := s.newPage(page.KindTaxonomyTerm, plural)
                        n.getTaxonomyNodeInfo().TransferValues(n)
                        s.workAllPages = append(s.workAllPages, n)
                    }
                }

                if taxonomyEnabled {
                    for termKey := range s.Taxonomies[plural] {

                        foundTaxonomyPage := false

                        for _, p := range taxonomyPages {
                            sectionsPath := p.SectionsPath()

                            if !strings.HasPrefix(sectionsPath, plural) {
                                continue
                            }

                            singularKey := strings.TrimPrefix(sectionsPath, plural)
                            singularKey = strings.TrimPrefix(singularKey, "/")

                            if singularKey == termKey {
                                foundTaxonomyPage = true
                                break
                            }
                        }

                        if !foundTaxonomyPage {
                            info := s.taxonomyNodes.Get(plural, termKey)
                            if info == nil {
                                panic("no info found")
                            }

                            n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
                            info.TransferValues(n)
                            s.workAllPages = append(s.workAllPages, n)
                        }
                    }
                }
            }
        }
    }

    return nil
}

func (h *HugoSites) removePageByFilename(filename string) {
    for _, s := range h.Sites {
        s.removePageFilename(filename)

@@ -742,23 +628,6 @@ func (h *HugoSites) removePageByFilename(filename string) {
    }
}

func (h *HugoSites) createPageCollections() error {
    for _, s := range h.Sites {
        for _, p := range s.rawAllPages {
            if !s.isEnabled(p.Kind()) {
                continue
            }

            shouldBuild := s.shouldBuild(p)
            s.buildStats.update(p)
            if shouldBuild {
                if p.m.headless {
                    s.headlessPages = append(s.headlessPages, p)
                } else {
                    s.workAllPages = append(s.workAllPages, p)
                }
            }
        }
    }

    allPages := newLazyPagesFactory(func() page.Pages {
        var pages page.Pages

@@ -950,8 +819,7 @@ type contentChangeMap struct {
    mu sync.RWMutex

    // Holds directories with leaf bundles.
    leafBundles *radix.Tree
    leafBundlesTxn *radix.Txn
    leafBundles *radix.Tree

    // Holds directories with branch bundles.
    branchBundles map[string]bool

@@ -969,18 +837,6 @@ type contentChangeMap struct {
    symContent map[string]map[string]bool
}

func (m *contentChangeMap) start() {
    m.mu.Lock()
    m.leafBundlesTxn = m.leafBundles.Txn()
    m.mu.Unlock()
}

func (m *contentChangeMap) stop() {
    m.mu.Lock()
    m.leafBundles = m.leafBundlesTxn.Commit()
    m.mu.Unlock()
}

func (m *contentChangeMap) add(filename string, tp bundleDirType) {
    m.mu.Lock()
    dir := filepath.Dir(filename) + helpers.FilePathSeparator

@@ -989,7 +845,7 @@ func (m *contentChangeMap) add(filename string, tp bundleDirType) {
    case bundleBranch:
        m.branchBundles[dir] = true
    case bundleLeaf:
        m.leafBundlesTxn.Insert([]byte(dir), true)
        m.leafBundles.Insert(dir, true)
    default:
        panic("invalid bundle type")
    }

@@ -1012,8 +868,8 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
        return dir, dir, bundleBranch
    }

    if key, _, found := m.leafBundles.Root().LongestPrefix([]byte(dir)); found {
        m.leafBundlesTxn.Delete(key)
    if key, _, found := m.leafBundles.LongestPrefix(dir); found {
        m.leafBundles.Delete(key)
        dir = string(key)
        return dir, dir, bundleLeaf
    }
@@ -18,7 +18,6 @@ import (
    "context"
    "fmt"
    "runtime/trace"
    "sort"

    "github.com/gohugoio/hugo/output"

@@ -31,6 +30,7 @@ import (
// Build builds all sites. If filesystem events are provided,
// this is considered to be a potential partial rebuild.
func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {

    if h.running {
        // Make sure we don't trigger rebuilds in parallel.
        h.runningMu.Lock()

@@ -75,25 +75,29 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {

    if !config.PartialReRender {
        prepare := func() error {
            for _, s := range h.Sites {
                s.Deps.BuildStartListeners.Notify()
            }
        init := func(conf *BuildCfg) error {
            for _, s := range h.Sites {
                s.Deps.BuildStartListeners.Notify()
            }

            if len(events) > 0 {
                // Rebuild
                if err := h.initRebuild(conf); err != nil {
                    return errors.Wrap(err, "initRebuild")
                }
            } else {
                if err := h.initSites(conf); err != nil {
                    return errors.Wrap(err, "initSites")
            if len(events) > 0 {
                // Rebuild
                if err := h.initRebuild(conf); err != nil {
                    return errors.Wrap(err, "initRebuild")
                }
            } else {
                if err := h.initSites(conf); err != nil {
                    return errors.Wrap(err, "initSites")
                }
            }

            return nil
        }

        var err error

        f := func() {
            err = h.process(conf, events...)
            err = h.process(conf, init, events...)
        }
        trace.WithRegion(ctx, "process", f)
        if err != nil {

@@ -195,7 +199,7 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
    }

    for _, s := range h.Sites {
        s.resetBuildState()
        s.resetBuildState(config.whatChanged.source)
    }

    h.reset(config)

@@ -205,7 +209,7 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
    return nil
}

func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
func (h *HugoSites) process(config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error {
    // We should probably refactor the Site and pull up most of the logic from there to here,
    // but that seems like a daunting task.
    // So for now, if there are more than one site (language),

@@ -215,9 +219,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {

    if len(events) > 0 {
        // This is a rebuild
        changed, err := firstSite.processPartial(events)
        config.whatChanged = &changed
        return err
        return firstSite.processPartial(config, init, events)
    }

    return firstSite.process(*config)

@@ -235,26 +237,27 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
        }
    }

    if err := h.createPageCollections(); err != nil {
        return err
    }

    if config.whatChanged.source {
        for _, s := range h.Sites {
            if err := s.assembleTaxonomies(); err != nil {
                return err
            }
        }
    }

    // Create pages for the section pages etc. without content file.
    if err := h.createMissingPages(); err != nil {
        return err
    if !config.whatChanged.source {
        return nil
    }

    for _, s := range h.Sites {
        s.setupSitePages()
        sort.Stable(s.workAllPages)
        if err := s.assemblePagesMap(s); err != nil {
            return err
        }

        if err := s.pagesMap.assembleTaxonomies(s); err != nil {
            return err
        }

        if err := s.createWorkAllPages(); err != nil {
            return err
        }

    }

    if err := h.createPageCollections(); err != nil {
        return err
    }

    return nil
@@ -365,7 +365,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
    require.NotNil(t, enTags["tag1"])
    require.NotNil(t, frTags["FRtag1"])
    b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
    b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")

    // Check Blackfriday config
    require.True(t, strings.Contains(content(doc1fr), "«"), content(doc1fr))

@@ -470,13 +469,6 @@ func TestMultiSitesRebuild(t *testing.T) {
    func(t *testing.T) {
        assert.Len(enSite.RegularPages(), 4, "1 en removed")

        // Check build stats
        require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
        require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
        require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
        require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
        require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
        require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
    },
},
{

@@ -609,70 +601,6 @@ func TestMultiSitesRebuild(t *testing.T) {

}

func TestAddNewLanguage(t *testing.T) {
    t.Parallel()
    assert := require.New(t)

    b := newMultiSiteTestDefaultBuilder(t)
    b.CreateSites().Build(BuildCfg{})

    fs := b.Fs

    newConfig := multiSiteTOMLConfigTemplate + `

[Languages.sv]
weight = 15
title = "Svenska"
`

    writeNewContentFile(t, fs.Source, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
    // replace the config
    b.WithNewConfig(newConfig)

    sites := b.H

    assert.NoError(b.LoadConfig())
    err := b.H.Build(BuildCfg{NewConfig: b.Cfg})

    if err != nil {
        t.Fatalf("Failed to rebuild sites: %s", err)
    }

    require.Len(t, sites.Sites, 5, fmt.Sprintf("Len %d", len(sites.Sites)))

    // The Swedish site should be put in the middle (language weight=15)
    enSite := sites.Sites[0]
    svSite := sites.Sites[1]
    frSite := sites.Sites[2]
    require.True(t, enSite.language.Lang == "en", enSite.language.Lang)
    require.True(t, svSite.language.Lang == "sv", svSite.language.Lang)
    require.True(t, frSite.language.Lang == "fr", frSite.language.Lang)

    homeEn := enSite.getPage(page.KindHome)
    require.NotNil(t, homeEn)
    require.Len(t, homeEn.Translations(), 4)

    require.Equal(t, "sv", homeEn.Translations()[0].Language().Lang)

    require.Len(t, enSite.RegularPages(), 5)
    require.Len(t, frSite.RegularPages(), 4)

    // Verify Swedish site
    require.Len(t, svSite.RegularPages(), 1)
    svPage := svSite.RegularPages()[0]

    require.Equal(t, "Swedish Contentfile", svPage.Title())
    require.Equal(t, "sv", svPage.Language().Lang)
    require.Len(t, svPage.Translations(), 2)
    require.Len(t, svPage.AllTranslations(), 3)
    require.Equal(t, "en", svPage.Translations()[0].Language().Lang)

    // Regular pages have no children
    require.Len(t, svPage.Pages(), 0)
    require.Len(t, svPage.Data().(page.Data).Pages(), 0)

}

// https://github.com/gohugoio/hugo/issues/4706
func TestContentStressTest(t *testing.T) {
    b := newTestSitesBuilder(t)

@@ -775,13 +703,13 @@ END
}

func checkContent(s *sitesBuilder, filename string, matches ...string) {
    s.T.Helper()
    content := readDestination(s.T, s.Fs, filename)
    for _, match := range matches {
        if !strings.Contains(content, match) {
            s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
        }
    }

}

func TestTranslationsFromContentToNonContent(t *testing.T) {
@@ -54,7 +54,7 @@ Content.
{{ range (.Paginate .Site.RegularPages).Pages }}
* Page Paginate: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
{{ end }}
{{ range .Pages }}
{{ range .Site.RegularPages }}
* Page Pages: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
{{ end }}
`)

@@ -143,8 +143,8 @@ Some **Markdown** in JSON shortcode.
const (
    commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
    commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
    commonListTemplateNoPaginator = `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
    commonListTemplate = commonPaginatorTemplate + `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
    commonListTemplateNoPaginator = `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
    commonListTemplate = commonPaginatorTemplate + `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
    commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
    prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
    prevNextInSectionTemplate = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`

@@ -193,7 +193,7 @@ Some **Markdown** in JSON shortcode.
b.AssertFileContent("public/index.html",
    "home|In English",
    "Site params: Rules",
    "Pages: Pages(18)|Data Pages: Pages(18)",
    "Pages: Pages(6)|Data Pages: Pages(6)",
    "Paginator: 1",
    "First Site: In English",
    "RelPermalink: /",
hugolib/page.go
@@ -23,6 +23,8 @@ import (
    "sort"
    "strings"

    "github.com/gohugoio/hugo/common/maps"

    "github.com/gohugoio/hugo/hugofs/files"

    "github.com/bep/gitmap"

@@ -121,31 +123,66 @@ func (p *pageState) MarshalJSON() ([]byte, error) {
    return page.MarshalPageToJSON(p)
}

func (p *pageState) Pages() page.Pages {
    p.pagesInit.Do(func() {
        if p.pages != nil {
            return
        }
func (p *pageState) getPages() page.Pages {
    b := p.bucket
    if b == nil {
        return nil
    }
    return b.getPages()
}

func (p *pageState) getPagesAndSections() page.Pages {
    b := p.bucket
    if b == nil {
        return nil
    }
    return b.getPagesAndSections()
}

// TODO(bep) cm add a test
func (p *pageState) RegularPages() page.Pages {
    p.regularPagesInit.Do(func() {
        var pages page.Pages

        switch p.Kind() {
        case page.KindPage:
        case page.KindHome:
            pages = p.s.RegularPages()
        case page.KindSection, page.KindHome, page.KindTaxonomyTerm:
            pages = p.getPages()
        case page.KindTaxonomy:
            termInfo := p.getTaxonomyNodeInfo()
            taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey)
            pages = taxonomy.Pages()
        case page.KindTaxonomyTerm:
            plural := p.getTaxonomyNodeInfo().plural
            // A list of all page.KindTaxonomy pages with matching plural
            for _, p := range p.s.findPagesByKind(page.KindTaxonomy) {
                if p.SectionsEntries()[0] == plural {
                    all := p.Pages()
                    for _, p := range all {
                        if p.IsPage() {
                            pages = append(pages, p)
                        }
                    }
        case kind404, kindSitemap, kindRobotsTXT:
        default:
            pages = p.s.RegularPages()
        }

        p.regularPages = pages

    })

    return p.regularPages
}

func (p *pageState) Pages() page.Pages {
    p.pagesInit.Do(func() {
        var pages page.Pages

        switch p.Kind() {
        case page.KindPage:
        case page.KindSection, page.KindHome:
            pages = p.getPagesAndSections()
        case page.KindTaxonomy:
            termInfo := p.bucket
            plural := maps.GetString(termInfo.meta, "plural")
            term := maps.GetString(termInfo.meta, "termKey")
            taxonomy := p.s.Taxonomies[plural].Get(term)
            pages = taxonomy.Pages()
        case page.KindTaxonomyTerm:
            pages = p.getPagesAndSections()
        default:
            pages = p.s.Pages()
        }

@@ -295,10 +332,9 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
        if len(sections) > 0 {
            section = sections[0]
        }
    case page.KindTaxonomyTerm:
        section = p.getTaxonomyNodeInfo().singular
    case page.KindTaxonomy:
        section = p.getTaxonomyNodeInfo().parent.singular
    case page.KindTaxonomyTerm, page.KindTaxonomy:
        section = maps.GetString(p.bucket.meta, "singular")

    default:
    }

@@ -359,11 +395,6 @@ func (p *pageState) initPage() error {
    return nil
}

func (p *pageState) setPages(pages page.Pages) {
    page.SortByDefault(pages)
    p.pages = pages
}

func (p *pageState) renderResources() (err error) {
    p.resourcesPublishInit.Do(func() {
        var toBeDeleted []int

@@ -489,13 +520,6 @@ func (p *pageState) addResources(r ...resource.Resource) {
    p.resources = append(p.resources, r...)
}

func (p *pageState) addSectionToParent() {
    if p.parent == nil {
        return
    }
    p.parent.subSections = append(p.parent.subSections, p)
}

func (p *pageState) mapContent(meta *pageMeta) error {

    s := p.shortcodeState

@@ -743,27 +767,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
    return nil
}

func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo {
    info := p.s.taxonomyNodes.Get(p.SectionsEntries()...)

    if info == nil {
        // There can be unused content pages for taxonomies (e.g. author that
        // has not written anything, yet), and these will not have a taxonomy
        // node created in the assemble taxonomies step.
        return nil
    }

    return info

}

func (p *pageState) sortParentSections() {
    if p.parent == nil {
        return
    }
    page.SortByDefault(p.parent.subSections)
}

// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
// this page. It is prefixed with a "/".
//
@@ -30,6 +30,8 @@ type pageCommon struct {
    s *Site
    m *pageMeta

    bucket *pagesMapBucket

    // Lazily initialized dependencies.
    init *lazy.Init

@@ -101,17 +103,17 @@ type pageCommon struct {
    translationKey     string
    translationKeyInit sync.Once

    // Will only be set for sections and regular pages.
    // Will only be set for bundled pages.
    parent *pageState

    // Will only be set for section pages and the home page.
    subSections page.Pages

    // Set in fast render mode to force render a given page.
    forceRender bool
}

type pagePages struct {
    pages     page.Pages
    pagesInit sync.Once
    pages     page.Pages

    regularPagesInit sync.Once
    regularPages     page.Pages
}
@@ -16,6 +16,8 @@ package hugolib
import (
    "sync"

    "github.com/gohugoio/hugo/common/maps"

    "github.com/gohugoio/hugo/resources/page"
)

@@ -36,22 +38,22 @@ func (p *pageData) Data() interface{} {

    switch p.Kind() {
    case page.KindTaxonomy:
        termInfo := p.getTaxonomyNodeInfo()
        pluralInfo := termInfo.parent
        bucket := p.bucket
        meta := bucket.meta
        plural := maps.GetString(meta, "plural")
        singular := maps.GetString(meta, "singular")

        singular := pluralInfo.singular
        plural := pluralInfo.plural
        term := termInfo.term
        taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)
        taxonomy := p.s.Taxonomies[plural].Get(maps.GetString(meta, "termKey"))

        p.data[singular] = taxonomy
        p.data["Singular"] = singular
        p.data["Singular"] = meta["singular"]
        p.data["Plural"] = plural
        p.data["Term"] = term
        p.data["Term"] = meta["term"]
    case page.KindTaxonomyTerm:
        info := p.getTaxonomyNodeInfo()
        plural := info.plural
        singular := info.singular
        bucket := p.bucket
        meta := bucket.meta
        plural := maps.GetString(meta, "plural")
        singular := maps.GetString(meta, "singular")

        p.data["Singular"] = singular
        p.data["Plural"] = plural
@@ -80,7 +80,17 @@ func (p *pagePaginator) Paginator(options ...interface{}) (*page.Pager, error) {

    pd := p.source.targetPathDescriptor
    pd.Type = p.source.outputFormat()
    paginator, err := page.Paginate(pd, p.source.Pages(), pagerSize)

    var pages page.Pages
    if p.source.IsHome() {
        // From Hugo 0.57 we made home.Pages() work like any other
        // section. To avoid the default paginators for the home page
        // changing in the wild, we make this a special case.
        pages = p.source.s.RegularPages()
    } else {
        pages = p.source.Pages()
    }
    paginator, err := page.Paginate(pd, pages, pagerSize)
    if err != nil {
        initErr = err
        return
@@ -27,9 +27,8 @@ import (
    bp "github.com/gohugoio/hugo/bufferpool"
    "github.com/gohugoio/hugo/tpl"

    "github.com/gohugoio/hugo/output"

    "github.com/gohugoio/hugo/helpers"
    "github.com/gohugoio/hugo/output"
    "github.com/gohugoio/hugo/resources/page"
    "github.com/gohugoio/hugo/resources/resource"
)
@@ -109,9 +109,21 @@ func (pt pageTree) Page() page.Page {
}

func (pt pageTree) Parent() page.Page {
    return pt.p.parent
    if pt.p.parent != nil {
        return pt.p.parent
    }

    if pt.p.bucket == nil || pt.p.bucket.parent == nil {
        return nil
    }

    return pt.p.bucket.parent.owner
}

func (pt pageTree) Sections() page.Pages {
    return pt.p.subSections
    if pt.p.bucket == nil {
        return nil
    }

    return pt.p.bucket.getSections()
}
@@ -531,7 +531,6 @@ date: 2018-01-15
    assert.Equal(2017, s.getPage("/no-index").Date().Year())
    assert.True(s.getPage("/with-index-no-date").Date().IsZero())
    assert.Equal(2018, s.getPage("/with-index-date").Date().Year())

}

func TestCreateNewPage(t *testing.T) {
@@ -1040,6 +1040,10 @@ slug: leaf
    b.WithContent("sv/b1/data2.json", "sv: data2")
    b.WithContent("nb/b1/data2.json", "nb: data2")

    b.WithContent("en/b3/_index.md", createPage("en: branch"))
    b.WithContent("en/b3/p1.md", createPage("en: page"))
    b.WithContent("en/b3/data1.json", "en: data")

    b.Build(BuildCfg{})

    b.AssertFileContent("public/en/index.html",
@@ -17,8 +17,12 @@ import (
    "fmt"
    "path"
    "path/filepath"
    "sort"
    "strings"
    "sync"
    "time"

    "github.com/gohugoio/hugo/resources/resource"

    "github.com/pkg/errors"

@@ -32,6 +36,7 @@ var ambiguityFlag = &pageState{}

// PageCollections contains the page collections for a site.
type PageCollections struct {
    pagesMap *pagesMap

    // Includes absolute all pages (of all types), including drafts etc.
    rawAllPages pageStatePages

@@ -340,15 +345,6 @@ func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStat
    return pages
}

func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
    for _, p := range c.workAllPages {
        if p.Kind() == kind {
            return p
        }
    }
    return nil
}

func (c *PageCollections) addPage(page *pageState) {
    c.rawAllPages = append(c.rawAllPages, page)
}

@@ -389,3 +385,189 @@ func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
        page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
    }
}

func (c *PageCollections) assemblePagesMap(s *Site) error {
    c.pagesMap = newPagesMap(s)

    rootSections := make(map[string]bool)

    // Add all branch nodes first.
    for _, p := range c.rawAllPages {
        rootSections[p.Section()] = true
        if p.IsPage() {
            continue
        }
        c.pagesMap.addPage(p)
    }

    // Create missing home page and the first level sections if no
    // _index provided.
    s.home = c.pagesMap.getOrCreateHome()
    for k := range rootSections {
        c.pagesMap.createSectionIfNotExists(k)
    }

    // Attach the regular pages to their section.
    for _, p := range c.rawAllPages {
        if p.IsNode() {
            continue
        }
        c.pagesMap.addPage(p)
    }

    return nil
}

func (c *PageCollections) createWorkAllPages() error {
    c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages))
    c.headlessPages = make(pageStatePages, 0)

    var (
        homeDates    *resource.Dates
        sectionDates *resource.Dates
        siteLastmod  time.Time
        siteLastDate time.Time

        sectionsParamId      = "mainSections"
        sectionsParamIdLower = strings.ToLower(sectionsParamId)
    )

    mainSections, mainSectionsFound := c.pagesMap.s.Info.Params()[sectionsParamIdLower]

    var (
        bucketsToRemove []string
        rootBuckets     []*pagesMapBucket
    )

    c.pagesMap.r.Walk(func(s string, v interface{}) bool {
        bucket := v.(*pagesMapBucket)
        var parentBucket *pagesMapBucket

        if s != "/" {
            _, parentv, found := c.pagesMap.r.LongestPrefix(path.Dir(s))
            if !found {
                panic(fmt.Sprintf("[BUG] parent bucket not found for %q", s))
            }
            parentBucket = parentv.(*pagesMapBucket)

            if !mainSectionsFound && strings.Count(s, "/") == 1 {
                // Root section
                rootBuckets = append(rootBuckets, bucket)
            }
        }

        if bucket.owner.IsHome() {
            if resource.IsZeroDates(bucket.owner) {
                // Calculate dates from the page tree.
                homeDates = &bucket.owner.m.Dates
            }
        }

        sectionDates = nil
        if resource.IsZeroDates(bucket.owner) {
            sectionDates = &bucket.owner.m.Dates
        }

        if parentBucket != nil {
            bucket.parent = parentBucket
            if bucket.owner.IsSection() {
                parentBucket.bucketSections = append(parentBucket.bucketSections, bucket)
            }
        }

        tmp := bucket.pages[:0]
        for _, x := range bucket.pages {
            if c.pagesMap.s.shouldBuild(x) {
                tmp = append(tmp, x)
            }
        }
        bucket.pages = tmp

        if bucket.isEmpty() {
            if bucket.owner.IsSection() && bucket.owner.File().IsZero() {
                // Check for any nested section.
                var hasDescendant bool
                c.pagesMap.r.WalkPrefix(s, func(ss string, v interface{}) bool {
                    if s != ss {
                        hasDescendant = true
                        return true
                    }
                    return false
                })
                if !hasDescendant {
                    // This is an auto-created section with, now, nothing in it.
                    bucketsToRemove = append(bucketsToRemove, s)
                    return false
                }
            }
        }

        if !bucket.disabled {
            c.workAllPages = append(c.workAllPages, bucket.owner)
        }

        if !bucket.view {
            for _, p := range bucket.pages {
                ps := p.(*pageState)
                ps.parent = bucket.owner
                if ps.m.headless {
                    c.headlessPages = append(c.headlessPages, ps)
                } else {
                    c.workAllPages = append(c.workAllPages, ps)
                }

                if homeDates != nil {
                    homeDates.UpdateDateAndLastmodIfAfter(ps)
                }

                if sectionDates != nil {
                    sectionDates.UpdateDateAndLastmodIfAfter(ps)
                }

                if p.Lastmod().After(siteLastmod) {
                    siteLastmod = p.Lastmod()
                }
                if p.Date().After(siteLastDate) {
                    siteLastDate = p.Date()
                }
            }
        }

        return false
    })

    c.pagesMap.s.lastmod = siteLastmod

    if !mainSectionsFound {

        // Calculate main section
        var (
            maxRootBucketWeight int
            maxRootBucket       *pagesMapBucket
        )

        for _, b := range rootBuckets {
            weight := len(b.pages) + (len(b.bucketSections) * 5)
            if weight >= maxRootBucketWeight {
                maxRootBucket = b
                maxRootBucketWeight = weight
            }
        }

        if maxRootBucket != nil {
            // Try to make this as backwards compatible as possible.
            mainSections = []string{maxRootBucket.owner.Section()}
        }
    }

    c.pagesMap.s.Info.Params()[sectionsParamId] = mainSections
    c.pagesMap.s.Info.Params()[sectionsParamIdLower] = mainSections

    for _, key := range bucketsToRemove {
        c.pagesMap.r.Delete(key)
    }

    sort.Sort(c.workAllPages)

    return nil
}
@@ -36,9 +36,8 @@ import (

    "github.com/gohugoio/hugo/source"

    "github.com/gohugoio/hugo/hugofs"

    "github.com/gohugoio/hugo/common/loggers"
    "github.com/gohugoio/hugo/hugofs"
    "github.com/spf13/afero"
)

@@ -109,10 +108,6 @@ type contentDirKey struct {
// Collect.
func (c *pagesCollector) Collect() error {
    c.proc.Start(context.Background())
    if c.tracker != nil {
        c.tracker.start()
        defer c.tracker.stop()
    }

    var collectErr error
    if len(c.filenames) == 0 {

@@ -125,7 +120,7 @@ func (c *pagesCollector) Collect() error {
        dirs[contentDirKey{dir, filename, btype}] = true
    }

    for dir, _ := range dirs {
    for dir := range dirs {
        switch dir.tp {
        case bundleLeaf, bundleBranch:
            collectErr = c.collectDir(dir.dirname, true, nil)
hugolib/pages_map.go (new file)
@@ -0,0 +1,367 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
    "fmt"
    "path"
    "path/filepath"
    "strings"
    "sync"

    radix "github.com/armon/go-radix"
    "github.com/spf13/cast"

    "github.com/gohugoio/hugo/resources/page"
)

func newPagesMap(s *Site) *pagesMap {
    return &pagesMap{
        r: radix.New(),
        s: s,
    }
}

type pagesMap struct {
    r *radix.Tree
    s *Site
}

func (m *pagesMap) Get(key string) *pagesMapBucket {
    key = m.cleanKey(key)
    v, found := m.r.Get(key)
    if !found {
        return nil
    }

    return v.(*pagesMapBucket)
}

func (m *pagesMap) getKey(p *pageState) string {
    if !p.File().IsZero() {
        return m.cleanKey(p.File().Dir())
    }
    return m.cleanKey(p.SectionsPath())
}

func (m *pagesMap) getOrCreateHome() *pageState {
    var home *pageState
    b, found := m.r.Get("/")
    if !found {
        home = m.s.newPage(page.KindHome)
        m.addBucketFor("/", home, nil)
    } else {
        home = b.(*pagesMapBucket).owner
    }

    return home
}

func (m *pagesMap) createSectionIfNotExists(section string) {
    key := m.cleanKey(section)
    _, found := m.r.Get(key)
    if !found {
        kind := m.s.kindFromSectionPath(section)
        p := m.s.newPage(kind, section)
        m.addBucketFor(key, p, nil)
    }
}

func (m *pagesMap) addBucket(p *pageState) {
    key := m.getKey(p)

    m.addBucketFor(key, p, nil)
}

func (m *pagesMap) addBucketFor(key string, p *pageState, meta map[string]interface{}) *pagesMapBucket {
    var isView bool
    switch p.Kind() {
    case page.KindTaxonomy, page.KindTaxonomyTerm:
        isView = true
    }

    disabled := !m.s.isEnabled(p.Kind())

    bucket := &pagesMapBucket{owner: p, view: isView, meta: meta, disabled: disabled}
    p.bucket = bucket

    m.r.Insert(key, bucket)

    return bucket
}

func (m *pagesMap) addPage(p *pageState) {
    if !p.IsPage() {
        m.addBucket(p)
        return
    }

    if !m.s.isEnabled(page.KindPage) {
        return
    }

    key := m.getKey(p)

    var bucket *pagesMapBucket

    _, v, found := m.r.LongestPrefix(key)
    if !found {
        panic(fmt.Sprintf("[BUG] bucket with key %q not found", key))
    }

    bucket = v.(*pagesMapBucket)
    p.bucket = bucket

    bucket.pages = append(bucket.pages, p)
}

func (m *pagesMap) withEveryPage(f func(p *pageState)) {
    m.r.Walk(func(k string, v interface{}) bool {
        b := v.(*pagesMapBucket)
        f(b.owner)
        if !b.view {
            for _, p := range b.pages {
                f(p.(*pageState))
            }
        }

        return false
    })
}

func (m *pagesMap) assembleTaxonomies(s *Site) error {
    s.Taxonomies = make(TaxonomyList)

    type bucketKey struct {
        plural  string
        termKey string
    }

    // Temporary cache.
    taxonomyBuckets := make(map[bucketKey]*pagesMapBucket)

    for singular, plural := range s.siteCfg.taxonomiesConfig {
        s.Taxonomies[plural] = make(Taxonomy)
        bkey := bucketKey{
            plural: plural,
        }

        bucket := m.Get(plural)

        if bucket == nil {
            // Create the page and bucket
            n := s.newPage(page.KindTaxonomyTerm, plural)

            key := m.cleanKey(plural)
            bucket = m.addBucketFor(key, n, nil)
        }

        if bucket.meta == nil {
            bucket.meta = map[string]interface{}{
                "singular": singular,
                "plural":   plural,
            }
        }

        // Add it to the temporary cache.
        taxonomyBuckets[bkey] = bucket

        // Taxonomy entries used in page front matter will be picked up later,
        // but there may be some yet to be used.
        pluralPrefix := m.cleanKey(plural) + "/"
        m.r.WalkPrefix(pluralPrefix, func(k string, v interface{}) bool {
            tb := v.(*pagesMapBucket)
            termKey := strings.TrimPrefix(k, pluralPrefix)
            if tb.meta == nil {
                tb.meta = map[string]interface{}{
                    "singular": singular,
                    "plural":   plural,
                    "term":     tb.owner.Title(),
                    "termKey":  termKey,
                }
            }

            bucket.pages = append(bucket.pages, tb.owner)
            bkey.termKey = termKey
            taxonomyBuckets[bkey] = tb

            return false
        })

    }

    addTaxonomy := func(singular, plural, term string, weight int, p page.Page) {
        bkey := bucketKey{
            plural: plural,
        }

        termKey := s.getTaxonomyKey(term)

        b1 := taxonomyBuckets[bkey]

        var b2 *pagesMapBucket
        bkey.termKey = termKey
        b, found := taxonomyBuckets[bkey]
        if found {
            b2 = b
        } else {

            // Create the page and bucket
            n := s.newTaxonomyPage(term, plural, termKey)
            meta := map[string]interface{}{
                "singular": singular,
                "plural":   plural,
                "term":     term,
                "termKey":  termKey,
            }

            key := m.cleanKey(path.Join(plural, termKey))
            b2 = m.addBucketFor(key, n, meta)
            b1.pages = append(b1.pages, b2.owner)
            taxonomyBuckets[bkey] = b2

        }

        w := page.NewWeightedPage(weight, p, b2.owner)

        s.Taxonomies[plural].add(termKey, w)

        b1.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
        b2.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
    }

    m.r.Walk(func(k string, v interface{}) bool {
        b := v.(*pagesMapBucket)
        if b.view {
            return false
        }

        for singular, plural := range s.siteCfg.taxonomiesConfig {
            for _, p := range b.pages {

                vals := getParam(p, plural, false)

                w := getParamToLower(p, plural+"_weight")
                weight, err := cast.ToIntE(w)
                if err != nil {
                    m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.Path())
                    // weight will equal zero, so let the flow continue
                }

                if vals != nil {
                    if v, ok := vals.([]string); ok {
                        for _, idx := range v {
                            addTaxonomy(singular, plural, idx, weight, p)
                        }
                    } else if v, ok := vals.(string); ok {
                        addTaxonomy(singular, plural, v, weight, p)
                    } else {
                        m.s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.Path())
                    }
                }

            }
        }
        return false
    })

    for _, plural := range s.siteCfg.taxonomiesConfig {
        for k := range s.Taxonomies[plural] {
            s.Taxonomies[plural][k].Sort()
        }
    }

    return nil
}

func (m *pagesMap) cleanKey(key string) string {
    key = filepath.ToSlash(strings.ToLower(key))
    key = strings.Trim(key, "/")
    return "/" + key
}

func (m *pagesMap) dump() {
    m.r.Walk(func(s string, v interface{}) bool {
        b := v.(*pagesMapBucket)
        fmt.Println("-------\n", s, ":", b.owner.Kind(), ":")
        if b.owner != nil {
            fmt.Println("Owner:", b.owner.Path())
        }
        for _, p := range b.pages {
            fmt.Println(p.Path())
        }
        return false
    })
}

type pagesMapBucket struct {
    // Set if the pages in this bucket are also present in another bucket.
    view bool

    // Some additional metadata attached to this node.
    meta map[string]interface{}

    owner *pageState // The branch node

    // When disableKinds is enabled for this node.
    disabled bool

    // Used to navigate the sections tree
    parent         *pagesMapBucket
    bucketSections []*pagesMapBucket

    pagesInit sync.Once
    pages     page.Pages

    pagesAndSectionsInit sync.Once
    pagesAndSections     page.Pages

    sectionsInit sync.Once
    sections     page.Pages
}

func (b *pagesMapBucket) isEmpty() bool {
    return len(b.pages) == 0 && len(b.bucketSections) == 0
}

func (b *pagesMapBucket) getPages() page.Pages {
    b.pagesInit.Do(func() {
        page.SortByDefault(b.pages)
    })
    return b.pages
}

func (b *pagesMapBucket) getPagesAndSections() page.Pages {
    b.pagesAndSectionsInit.Do(func() {
        var pas page.Pages
        pas = append(pas, b.pages...)
        for _, p := range b.bucketSections {
            pas = append(pas, p.owner)
        }
        b.pagesAndSections = pas
        page.SortByDefault(b.pagesAndSections)
    })
    return b.pagesAndSections
}

func (b *pagesMapBucket) getSections() page.Pages {
    b.sectionsInit.Do(func() {
        for _, p := range b.bucketSections {
            b.sections = append(b.sections, p.owner)
        }
        page.SortByDefault(b.sections)
    })

    return b.sections
}
hugolib/site.go
@@ -58,7 +58,6 @@ import (
    "github.com/gohugoio/hugo/related"
    "github.com/gohugoio/hugo/resources"
    "github.com/gohugoio/hugo/resources/page/pagemeta"
    "github.com/gohugoio/hugo/resources/resource"
    "github.com/gohugoio/hugo/source"
    "github.com/gohugoio/hugo/tpl"

@@ -94,15 +93,11 @@ type Site struct {

    Taxonomies TaxonomyList

    taxonomyNodes *taxonomyNodeInfos

    Sections Taxonomy
    Info     SiteInfo

    layoutHandler *output.LayoutHandler

    buildStats *buildStats

    language *langs.Language

    siteCfg siteConfigHolder

@@ -216,12 +211,13 @@ func (s *Site) prepareInits() {

    s.init.prevNextInSection = init.Branch(func() (interface{}, error) {
        var rootSection []int
        // TODO(bep) cm attach this to the bucket.
        for i, p1 := range s.workAllPages {
            if p1.IsPage() && p1.Section() == "" {
                rootSection = append(rootSection, i)
            }
            if p1.IsSection() {
                sectionPages := p1.Pages()
                sectionPages := p1.RegularPages()
                for i, p2 := range sectionPages {
                    p2s := p2.(*pageState)
                    if p2s.posNextPrevSection == nil {

@@ -263,28 +259,6 @@ func (s *Site) prepareInits() {

}

// Build stats for a given site.
type buildStats struct {
    draftCount   int
    futureCount  int
    expiredCount int
}

// TODO(bep) consolidate all site stats into this
func (b *buildStats) update(p page.Page) {
    if p.Draft() {
        b.draftCount++
    }

    if resource.IsFuture(p) {
        b.futureCount++
    }

    if resource.IsExpired(p) {
        b.expiredCount++
    }
}

type siteRenderingContext struct {
    output.Format
}

@@ -355,9 +329,8 @@ func (s *Site) reset() *Site {
        publisher:              s.publisher,
        siteConfigConfig:       s.siteConfigConfig,
        enableInlineShortcodes: s.enableInlineShortcodes,
        buildStats:             &buildStats{},
        init:                   s.init,
        PageCollections:        newPageCollections(),
        PageCollections:        s.PageCollections,
        siteCfg:                s.siteCfg,
    }

@@ -453,7 +426,6 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
        outputFormatsConfig:    siteOutputFormatsConfig,
        mediaTypesConfig:       siteMediaTypesConfig,
        frontmatterHandler:     frontMatterHandler,
        buildStats:             &buildStats{},
        enableInlineShortcodes: cfg.Language.GetBool("enableInlineShortcodes"),
        siteCfg:                siteConfig,
    }

@@ -920,7 +892,7 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
// reBuild partially rebuilds a site given the filesystem events.
// It returns whether the content source was changed.
// TODO(bep) clean up/rewrite this method.
func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {

    events = s.filterFileEvents(events)
    events = s.translateFileEvents(events)

@@ -974,6 +946,18 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
        }
    }

    changed := &whatChanged{
        source: len(sourceChanged) > 0 || len(shortcodesChanged) > 0,
        other:  len(tmplChanged) > 0 || len(i18nChanged) > 0 || len(dataChanged) > 0,
        files:  sourceFilesChanged,
    }

    config.whatChanged = changed

    if err := init(config); err != nil {
        return err
    }

    // These in memory resource caches will be rebuilt on demand.
    for _, s := range s.h.Sites {
        s.ResourceSpec.ResourceCache.DeletePartitions(cachePartitions...)

@@ -987,7 +971,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {

    // TODO(bep) globals clean
    if err := first.Deps.LoadResources(); err != nil {
        return whatChanged{}, err
        return err
    }

    for i := 1; i < len(sites); i++ {

@@ -1003,7 +987,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
            return nil
        })
        if err != nil {
            return whatChanged{}, err
            return err
        }
    }
}

@@ -1062,18 +1046,12 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
        filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged)

        if err := s.readAndProcessContent(filenamesChanged...); err != nil {
            return whatChanged{}, err
            return err
        }

    }

    changed := whatChanged{
        source: len(sourceChanged) > 0 || len(shortcodesChanged) > 0,
        other:  len(tmplChanged) > 0 || len(i18nChanged) > 0 || len(dataChanged) > 0,
        files:  sourceFilesChanged,
    }

    return changed, nil
    return nil

}

@@ -1090,54 +1068,6 @@ func (s *Site) process(config BuildCfg) (err error) {

}

func (s *Site) setupSitePages() {
    var homeDates *resource.Dates
    if s.home != nil {
        // If the home page has no dates set, we fall back to the site dates.
        homeDates = &s.home.m.Dates
    }

    if !s.lastmod.IsZero() && (homeDates == nil || !resource.IsZeroDates(homeDates)) {
        return
    }

    if homeDates != nil && !s.lastmod.IsZero() {
        homeDates.FDate = s.lastmod
        homeDates.FLastmod = s.lastmod
        return

    }

    var siteLastmod time.Time
    var siteLastDate time.Time

    for _, page := range s.workAllPages {
        if !page.IsPage() {
            continue
        }
        // Determine Site.Info.LastChange
        // Note that the logic to determine which date to use for Lastmod
        // is already applied, so this is *the* date to use.
        // We cannot just pick the last page in the default sort, because
        // that may not be ordered by date.
        // TODO(bep) check if this can be done earlier
        if page.Lastmod().After(siteLastmod) {
            siteLastmod = page.Lastmod()
        }
        if page.Date().After(siteLastDate) {
            siteLastDate = page.Date()
        }
    }

    s.lastmod = siteLastmod

    if homeDates != nil && resource.IsZeroDates(homeDates) {
        homeDates.FDate = siteLastDate
        homeDates.FLastmod = s.lastmod
    }

}

func (s *Site) render(ctx *siteRenderContext) (err error) {

    if err := page.Clear(); err != nil {

@@ -1483,81 +1413,22 @@ func (s *Site) getTaxonomyKey(key string) string {
    return strings.ToLower(s.PathSpec.MakePath(key))
}

func (s *Site) assembleTaxonomies() error {
    s.Taxonomies = make(TaxonomyList)
    taxonomies := s.siteCfg.taxonomiesConfig
    for _, plural := range taxonomies {
        s.Taxonomies[plural] = make(Taxonomy)
    }

    s.taxonomyNodes = &taxonomyNodeInfos{
        m:      make(map[string]*taxonomyNodeInfo),
        getKey: s.getTaxonomyKey,
    }

    s.Log.INFO.Printf("found taxonomies: %#v\n", taxonomies)

    for singular, plural := range taxonomies {
        parent := s.taxonomyNodes.GetOrCreate(plural, "")
        parent.singular = singular

        addTaxonomy := func(plural, term string, weight int, p page.Page) {
            key := s.getTaxonomyKey(term)

            n := s.taxonomyNodes.GetOrCreate(plural, term)
            n.parent = parent

            w := page.NewWeightedPage(weight, p, n.owner)

            s.Taxonomies[plural].add(key, w)

            n.UpdateFromPage(w.Page)
            parent.UpdateFromPage(w.Page)
        }

        for _, p := range s.workAllPages {
            vals := getParam(p, plural, false)

            w := getParamToLower(p, plural+"_weight")
            weight, err := cast.ToIntE(w)
            if err != nil {
                s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.pathOrTitle())
                // weight will equal zero, so let the flow continue
            }

            if vals != nil {
                if v, ok := vals.([]string); ok {
                    for _, idx := range v {
                        addTaxonomy(plural, idx, weight, p)
                    }
                } else if v, ok := vals.(string); ok {
                    addTaxonomy(plural, v, weight, p)
                } else {
                    s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.pathOrTitle())
                }
            }
        }

        for k := range s.Taxonomies[plural] {
            s.Taxonomies[plural][k].Sort()
        }
    }

    return nil
}

// Prepare site for a new full build.
func (s *Site) resetBuildState() {
func (s *Site) resetBuildState(sourceChanged bool) {
    s.relatedDocsHandler = s.relatedDocsHandler.Clone()
    s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
    s.buildStats = &buildStats{}
    s.init.Reset()

    for _, p := range s.rawAllPages {
        p.pagePages = &pagePages{}
        p.subSections = page.Pages{}
        p.parent = nil
        p.Scratcher = maps.NewScratcher()
    if sourceChanged {
        s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
        for _, p := range s.rawAllPages {
            p.pagePages = &pagePages{}
            p.parent = nil
            p.Scratcher = maps.NewScratcher()
        }
    } else {
        s.pagesMap.withEveryPage(func(p *pageState) {
            p.Scratcher = maps.NewScratcher()
        })
    }
}

@@ -1759,8 +1630,11 @@ func (s *Site) kindFromSections(sections []string) string {
        return page.KindHome
    }

    sectionPath := path.Join(sections...)
    return s.kindFromSectionPath(path.Join(sections...))

}

func (s *Site) kindFromSectionPath(sectionPath string) string {
    for _, plural := range s.siteCfg.taxonomiesConfig {
        if plural == sectionPath {
            return page.KindTaxonomyTerm
@@ -14,14 +14,7 @@
|
||||
package hugolib
|
||||
|
||||
import (
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gohugoio/hugo/resources/page"
|
||||
"github.com/gohugoio/hugo/resources/resource"
|
||||
|
||||
radix "github.com/hashicorp/go-immutable-radix"
|
||||
)
|
||||
|
||||
// Sections returns the top level sections.
|
||||
@@ -37,208 +30,3 @@ func (s *SiteInfo) Sections() page.Pages {
|
||||
func (s *SiteInfo) Home() (page.Page, error) {
|
||||
return s.s.home, nil
|
||||
}
|
||||
|
||||
func (s *Site) assembleSections() pageStatePages {
|
||||
var newPages pageStatePages
|
||||
|
||||
if !s.isEnabled(page.KindSection) {
|
||||
return newPages
|
||||
}
|
||||
|
||||
// Maps section kind pages to their path, i.e. "my/section"
|
||||
sectionPages := make(map[string]*pageState)
|
||||
|
||||
// The sections with content files will already have been created.
|
||||
for _, sect := range s.findWorkPagesByKind(page.KindSection) {
|
||||
sectionPages[sect.SectionsPath()] = sect
|
||||
}
|
||||
|
||||
const (
|
||||
sectKey = "__hs"
|
||||
sectSectKey = "_a" + sectKey
|
||||
sectPageKey = "_b" + sectKey
|
||||
)
|
||||

	var (
		inPages    = radix.New().Txn()
		inSections = radix.New().Txn()
		undecided  pageStatePages
	)

	home := s.findFirstWorkPageByKindIn(page.KindHome)

	for i, p := range s.workAllPages {

		if p.Kind() != page.KindPage {
			continue
		}

		sections := p.SectionsEntries()

		if len(sections) == 0 {
			// Root level pages. These will have the home page as their Parent.
			p.parent = home
			continue
		}

		sectionKey := p.SectionsPath()
		_, found := sectionPages[sectionKey]

		if !found && len(sections) == 1 {

			// We only create content-file-less sections for the root sections.
			n := s.newPage(page.KindSection, sections[0])

			sectionPages[sectionKey] = n
			newPages = append(newPages, n)
			found = true
		}

		if len(sections) > 1 {
			// Create the root section if not found.
			_, rootFound := sectionPages[sections[0]]
			if !rootFound {
				sect := s.newPage(page.KindSection, sections[0])
				sectionPages[sections[0]] = sect
				newPages = append(newPages, sect)
			}
		}

		if found {
			pagePath := path.Join(sectionKey, sectPageKey, strconv.Itoa(i))
			inPages.Insert([]byte(pagePath), p)
		} else {
			undecided = append(undecided, p)
		}
	}

	// Create any missing sections in the tree.
	// A sub-section needs a content file, but to create a navigational tree,
	// given a content file in /content/a/b/c/_index.md, we cannot create just
	// the c section.
	for _, sect := range sectionPages {
		sections := sect.SectionsEntries()
		for i := len(sections); i > 0; i-- {
			sectionPath := sections[:i]
			sectionKey := path.Join(sectionPath...)
			_, found := sectionPages[sectionKey]
			if !found {
				sect = s.newPage(page.KindSection, sectionPath[len(sectionPath)-1])
				sect.m.sections = sectionPath
				sectionPages[sectionKey] = sect
				newPages = append(newPages, sect)
			}
		}
	}

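	// Example of the expansion above: a lone content file
	// /content/a/b/c/_index.md yields the section entries ["a" "b" "c"];
	// the inner loop then checks the prefixes "a/b/c", "a/b" and "a",
	// creating a content-less section page for every prefix still missing.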
	for k, sect := range sectionPages {
		inPages.Insert([]byte(path.Join(k, sectSectKey)), sect)
		inSections.Insert([]byte(k), sect)
	}

	var (
		currentSection *pageState
		children       page.Pages
		dates          *resource.Dates
		rootSections   = inSections.Commit().Root()
	)

	for i, p := range undecided {
		// Now we can decide where to put this page into the tree.
		sectionKey := p.SectionsPath()

		_, v, _ := rootSections.LongestPrefix([]byte(sectionKey))
		sect := v.(*pageState)
		pagePath := path.Join(path.Join(sect.SectionsEntries()...), sectSectKey, "u", strconv.Itoa(i))
		inPages.Insert([]byte(pagePath), p)
	}

	var rootPages = inPages.Commit().Root()

	rootPages.Walk(func(path []byte, v interface{}) bool {
		p := v.(*pageState)

		if p.Kind() == page.KindSection {
			if currentSection != nil {
				// A new section
				currentSection.setPages(children)
				if dates != nil {
					currentSection.m.Dates = *dates
				}
			}

			currentSection = p
			children = make(page.Pages, 0)
			dates = nil
			// Use section's dates from front matter if set.
			if resource.IsZeroDates(currentSection) {
				dates = &resource.Dates{}
			}

			return false

		}

		// Regular page
		p.parent = currentSection
		children = append(children, p)
		if dates != nil {
			dates.UpdateDateAndLastmodIfAfter(p)
		}

		return false
	})
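	// The walk above visits keys in sorted order, so each section key
	// (".../_a__hs") arrives before the page keys (".../_b__hs/...")
	// beneath it, and every page is attached to the most recently seen
	// section. Returning false from the callback means "keep walking";
	// the section still being collected is flushed just below.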

	if currentSection != nil {
		currentSection.setPages(children)
		if dates != nil {
			currentSection.m.Dates = *dates
		}
	}

	// Build the sections hierarchy
	for _, sect := range sectionPages {
		sections := sect.SectionsEntries()
		if len(sections) == 1 {
			if home != nil {
				sect.parent = home
			}
		} else {
			parentSearchKey := path.Join(sect.SectionsEntries()[:len(sections)-1]...)
			_, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
			p := v.(*pageState)
			sect.parent = p
		}

		sect.addSectionToParent()
	}

	var (
		sectionsParamId      = "mainSections"
		sectionsParamIdLower = strings.ToLower(sectionsParamId)
		mainSections         interface{}
		mainSectionsFound    bool
		maxSectionWeight     int
	)

	mainSections, mainSectionsFound = s.Info.Params()[sectionsParamIdLower]

	for _, sect := range sectionPages {
		sect.sortParentSections()

		if !mainSectionsFound {
			weight := len(sect.Pages()) + (len(sect.Sections()) * 5)
			if weight >= maxSectionWeight {
				mainSections = []string{sect.Section()}
				maxSectionWeight = weight
			}
		}
	}

	// Try to make this as backwards compatible as possible.
	s.Info.Params()[sectionsParamId] = mainSections
	s.Info.Params()[sectionsParamIdLower] = mainSections

	return newPages
}

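For reference, a minimal self-contained sketch of the radix-tree pattern the
removed assembleSections relied on, using the same
github.com/hashicorp/go-immutable-radix package this commit replaces (keys and
values are invented for the example):

	package main

	import (
		"fmt"

		radix "github.com/hashicorp/go-immutable-radix"
	)

	func main() {
		// Build an immutable tree through a transaction, then commit.
		txn := radix.New().Txn()
		txn.Insert([]byte("a"), "section a")
		txn.Insert([]byte("a/b"), "section a/b")
		root := txn.Commit().Root()

		// LongestPrefix finds the closest enclosing section for a path.
		k, v, ok := root.LongestPrefix([]byte("a/b/c"))
		fmt.Println(string(k), v, ok) // a/b section a/b true

		// Walk visits keys in sorted order; returning false keeps going.
		root.Walk(func(k []byte, v interface{}) bool {
			fmt.Println(string(k), "=>", v)
			return false
		})
	}
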
@@ -137,21 +137,20 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
		}},
		{"empty1", func(assert *require.Assertions, p page.Page) {
			// > b,c
			assert.NotNil(getPage(p, "/empty1/b"))
			assert.Nil(getPage(p, "/empty1/b")) // No _index.md page.
			assert.NotNil(getPage(p, "/empty1/b/c"))

		}},
		{"empty2", func(assert *require.Assertions, p page.Page) {
			// > b,c,d where b and d have content files.
			// > b,c,d where b and d have _index.md files.
			b := getPage(p, "/empty2/b")
			assert.NotNil(b)
			assert.Equal("T40_-1", b.Title())

			c := getPage(p, "/empty2/b/c")
			assert.Nil(c) // No _index.md

			assert.NotNil(c)
			assert.Equal("Cs", c.Title())
			d := getPage(p, "/empty2/b/c/d")

			assert.NotNil(d)
			assert.Equal("T41_-1", d.Title())

@@ -163,9 +162,10 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
		{"empty3", func(assert *require.Assertions, p page.Page) {
			// b,c,d with regular page in b
			b := getPage(p, "/empty3/b")
			assert.NotNil(b)
			assert.Len(b.Pages(), 1)
			assert.Equal("empty3.md", b.Pages()[0].File().LogicalName())
			assert.Nil(b) // No _index.md
			e3 := getPage(p, "/empty3/b/empty3")
			assert.NotNil(e3)
			assert.Equal("empty3.md", e3.File().LogicalName())

		}},
		{"empty3", func(assert *require.Assertions, p page.Page) {
@@ -188,19 +188,23 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
		}},
		{"l1", func(assert *require.Assertions, p page.Page) {
			assert.Equal("L1s", p.Title())
			assert.Len(p.Pages(), 2)
			assert.Len(p.Pages(), 4) // 2 pages + 2 sections
			assert.True(p.Parent().IsHome())
			assert.Len(p.Sections(), 2)
		}},
		{"l1,l2", func(assert *require.Assertions, p page.Page) {
			assert.Equal("T2_-1", p.Title())
			assert.Len(p.Pages(), 3)
			assert.Len(p.Pages(), 4) // 3 pages + 1 section
			assert.Equal(p, p.Pages()[0].Parent())
			assert.Equal("L1s", p.Parent().Title())
			assert.Equal("/l1/l2/", p.RelPermalink())
			assert.Len(p.Sections(), 1)

			for _, child := range p.Pages() {
				if child.IsSection() {
					assert.Equal(child, child.CurrentSection())
					continue
				}

				assert.Equal(p, child.CurrentSection())
				active, err := child.InSection(p)

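The l1 and l1,l2 assertions capture one of the behavior changes this commit
introduces: .Pages on a section now includes its immediate child sections as
well as its regular pages (hence 2 pages + 2 sections = 4), while .Sections
still returns the child sections alone.
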
@@ -15,13 +15,11 @@ package hugolib

import (
	"fmt"
	"path"
	"sort"

	"github.com/gohugoio/hugo/compare"

	"github.com/gohugoio/hugo/resources/page"
	"github.com/gohugoio/hugo/resources/resource"
)

// The TaxonomyList is a list of all taxonomies and their values
@@ -156,95 +154,3 @@ func (s *orderedTaxonomySorter) Swap(i, j int) {
func (s *orderedTaxonomySorter) Less(i, j int) bool {
	return s.by(&s.taxonomy[i], &s.taxonomy[j])
}

// taxonomyNodeInfo stores additional metadata about a taxonomy.
type taxonomyNodeInfo struct {
	plural string

	// Maps "tags" to "tag".
	singular string

	// The term key as used in the taxonomy map, e.g. "tag1".
	// The value is normalized for paths, but may or may not be lowercased,
	// depending on the disablePathToLower setting.
	termKey string

	// The original, unedited term name. Useful for titles etc.
	term string

	dates resource.Dates

	parent *taxonomyNodeInfo

	// Either of kind taxonomyTerm (the parent) or taxonomy.
	owner *page.PageWrapper
}

func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) {

	// Select the latest dates.
	t.dates.UpdateDateAndLastmodIfAfter(p)
}

func (t *taxonomyNodeInfo) TransferValues(p *pageState) {
	t.owner.Page = p
	if p.Lastmod().IsZero() && p.Date().IsZero() {
		p.m.Dates.UpdateDateAndLastmodIfAfter(t.dates)
	}
}

// Maps either plural or plural/term to a taxonomy node.
// TODO(bep) consolidate somehow with s.Taxonomies
type taxonomyNodeInfos struct {
	m      map[string]*taxonomyNodeInfo
	getKey func(string) string
}

// map[string]*taxonomyNodeInfo
func (t taxonomyNodeInfos) key(parts ...string) string {
	return path.Join(parts...)
}

// GetOrAdd will get or create and add a new taxonomy node to the parent identified by plural.
// It will panic if the parent does not exist.
func (t taxonomyNodeInfos) GetOrAdd(plural, term string) *taxonomyNodeInfo {
	parent := t.GetOrCreate(plural, "")
	if parent == nil {
		panic(fmt.Sprintf("no parent found with plural %q", plural))
	}
	child := t.GetOrCreate(plural, term)
	child.parent = parent
	return child
}

func (t taxonomyNodeInfos) GetOrCreate(plural, term string) *taxonomyNodeInfo {
	termKey := t.getKey(term)
	key := t.key(plural, termKey)

	n, found := t.m[key]
	if found {
		return n
	}

	n = &taxonomyNodeInfo{
		plural:  plural,
		termKey: termKey,
		term:    term,
		owner:   &page.PageWrapper{}, // Page will be assigned later.
	}

	t.m[key] = n

	return n
}

func (t taxonomyNodeInfos) Get(sections ...string) *taxonomyNodeInfo {
	key := t.key(sections...)

	n, found := t.m[key]
	if found {
		return n
	}

	return nil
}

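A sketch of the key layout taxonomyNodeInfos maintains (the map contents shown
are assumptions for the example):

	// infos.GetOrCreate("tags", "")  -> node under key "tags"
	//                                   (the taxonomyTerm listing node)
	// infos.GetOrAdd("tags", "tag1") -> node under key "tags/tag1", with
	//                                   parent wired to the "tags" node
	// infos.Get("tags", "tag1")      -> plain lookup, nil when absent
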
@@ -168,7 +168,7 @@ permalinkeds:
	for taxonomy, count := range taxonomyTermPageCounts {
		term := s.getPage(page.KindTaxonomyTerm, taxonomy)
		require.NotNil(t, term)
		require.Len(t, term.Pages(), count)
		require.Len(t, term.Pages(), count, taxonomy)

		for _, p := range term.Pages() {
			require.Equal(t, page.KindTaxonomy, p.Kind())

@@ -698,6 +698,7 @@ type testHelper struct {
}

func (th testHelper) assertFileContent(filename string, matches ...string) {
	th.T.Helper()
	filename = th.replaceDefaultContentLanguageValue(filename)
	content := readDestination(th.T, th.Fs, filename)
	for _, match := range matches {
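The one-line addition here is th.T.Helper(). A standalone illustration of what
it buys (not from the diff): with t.Helper(), a failure inside the helper is
reported at the caller's line instead of inside the helper.

	func assertPositive(t *testing.T, n int) {
		t.Helper()
		if n <= 0 {
			t.Fatalf("expected positive, got %d", n)
		}
	}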