Create pages from _content.gotmpl

Closes #12427
Closes #12485
Closes #6310
Closes #5074
Bjørn Erik Pedersen
2024-03-17 11:12:33 +01:00
parent 55dea41c1a
commit e2d66e3218
60 changed files with 2391 additions and 438 deletions
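
For context: the collector changes below wire in Hugo's content adapters, where a _content.gotmpl template placed in a content directory creates pages and resources programmatically instead of via individual Markdown files. A minimal sketch of such a template, assuming the .AddPage/.AddResource methods the content-adapter feature exposes (the keys and values here are illustrative, not taken from this diff):

    {{ $content := dict "mediaType" "text/markdown" "value" "The quick brown fox jumps over the lazy dog." }}
    {{ $page := dict "content" $content "kind" "page" "path" "fox" "title" "The Fox" }}
    {{ .AddPage $page }}

    {{ $data := dict "mediaType" "text/plain" "value" "Hello from a generated resource." }}
    {{ .AddResource (dict "path" "fox/greeting.txt" "content" $data) }}

This is consistent with the changes below, where AddFi now reports how many pages and resources a single file produced.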


@@ -42,18 +42,20 @@ func newPagesCollector(
     logger loggers.Logger,
     infoLogger logg.LevelLogger,
     m *pageMap,
+    buildConfig *BuildCfg,
     ids []pathChange,
 ) *pagesCollector {
     return &pagesCollector{
-        ctx:        ctx,
-        h:          h,
-        fs:         sp.BaseFs.Content.Fs,
-        m:          m,
-        sp:         sp,
-        logger:     logger,
-        infoLogger: infoLogger,
-        ids:        ids,
-        seenDirs:   make(map[string]bool),
+        ctx:         ctx,
+        h:           h,
+        fs:          sp.BaseFs.Content.Fs,
+        m:           m,
+        sp:          sp,
+        logger:      logger,
+        infoLogger:  infoLogger,
+        buildConfig: buildConfig,
+        ids:         ids,
+        seenDirs:    make(map[string]bool),
     }
 }
@@ -68,6 +70,8 @@ type pagesCollector struct {
     fs afero.Fs
+    buildConfig *BuildCfg
     // List of paths that have changed. Used in partial builds.
     ids []pathChange
     seenDirs map[string]bool
@@ -82,6 +86,8 @@ func (c *pagesCollector) Collect() (collectErr error) {
     var (
         numWorkers = c.h.numWorkers
         numFilesProcessedTotal atomic.Uint64
+        numPagesProcessedTotal atomic.Uint64
+        numResourcesProcessed atomic.Uint64
         numFilesProcessedLast uint64
         fileBatchTimer = time.Now()
         fileBatchTimerMu sync.Mutex
@@ -98,6 +104,8 @@ func (c *pagesCollector) Collect() (collectErr error) {
             logg.Fields{
                 logg.Field{Name: "files", Value: numFilesProcessedBatch},
                 logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()},
+                logg.Field{Name: "pages_total", Value: numPagesProcessedTotal.Load()},
+                logg.Field{Name: "resources_total", Value: numResourcesProcessed.Load()},
             },
             "",
         )
@@ -113,10 +121,13 @@ func (c *pagesCollector) Collect() (collectErr error) {
     c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{
         NumWorkers: numWorkers,
         Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error {
-            if err := c.m.AddFi(fi); err != nil {
+            numPages, numResources, err := c.m.AddFi(fi, c.buildConfig)
+            if err != nil {
                 return hugofs.AddFileInfoToError(err, fi, c.fs)
             }
             numFilesProcessedTotal.Add(1)
+            numPagesProcessedTotal.Add(numPages)
+            numResourcesProcessed.Add(numResources)
             if numFilesProcessedTotal.Load()%1000 == 0 {
                 logFilesProcessed(false)
             }
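
Since the Handle function above runs concurrently on numWorkers goroutines, the new page and resource tallies are atomic.Uint64 counters rather than plain integers. A rough, self-contained sketch of that counting pattern, using a plain sync.WaitGroup worker pool in place of Hugo's internal rungroup package (file names and counts are made up for illustration):

    package main

    import (
        "fmt"
        "sync"
        "sync/atomic"
    )

    func main() {
        var (
            filesTotal atomic.Uint64 // mirrors numFilesProcessedTotal
            pagesTotal atomic.Uint64 // mirrors numPagesProcessedTotal
        )

        files := make(chan string)
        var wg sync.WaitGroup

        // Four workers, standing in for the rungroup handlers above.
        for i := 0; i < 4; i++ {
            wg.Add(1)
            go func() {
                defer wg.Done()
                for range files {
                    // The real handler gets per-file page/resource counts back
                    // from AddFi; here each file simply counts as one page.
                    pagesTotal.Add(1)
                    if filesTotal.Add(1)%1000 == 0 {
                        fmt.Println("files processed so far:", filesTotal.Load())
                    }
                }
            }()
        }

        for i := 0; i < 2500; i++ {
            files <- fmt.Sprintf("content/page-%d.md", i)
        }
        close(files)
        wg.Wait()

        fmt.Println("files:", filesTotal.Load(), "pages:", pagesTotal.Load())
    }
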
@@ -243,6 +254,21 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
         return nil, nil
     }
+    n := 0
+    for _, fi := range readdir {
+        if fi.Meta().PathInfo.IsContentData() {
+            // _content.json
+            // These are not part of any bundle, so just add them directly and remove them from the readdir slice.
+            if err := c.g.Enqueue(fi); err != nil {
+                return nil, err
+            }
+        } else {
+            readdir[n] = fi
+            n++
+        }
+    }
+    readdir = readdir[:n]
     // Pick the first regular file.
     var first hugofs.FileMetaInfo
     for _, fi := range readdir {
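
The added block above filters readdir in place: content-data entries such as _content.json belong to no bundle, so they are enqueued immediately and compacted out of the slice that the bundle logic then inspects. A small, self-contained sketch of that idiom using plain strings (the suffix check and names are stand-ins for Hugo's PathInfo.IsContentData and Enqueue, not its actual API):

    package main

    import (
        "fmt"
        "strings"
    )

    // splitContentData hands content-data entries to enqueue and keeps the rest
    // in the front of the same backing slice, mirroring the readdir[:n] trick.
    func splitContentData(entries []string, enqueue func(string) error) ([]string, error) {
        n := 0
        for _, e := range entries {
            if strings.HasSuffix(e, "_content.json") {
                if err := enqueue(e); err != nil {
                    return nil, err
                }
            } else {
                entries[n] = e
                n++
            }
        }
        return entries[:n], nil
    }

    func main() {
        entries := []string{"index.md", "_content.json", "post.md"}
        rest, _ := splitContentData(entries, func(e string) error {
            fmt.Println("enqueued:", e)
            return nil
        })
        fmt.Println("remaining:", rest) // [index.md post.md]
    }
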
@@ -260,6 +286,7 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
     // Any bundle file will always be first.
     firstPi := first.Meta().PathInfo
     if firstPi == nil {
+        panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename))
     }
@@ -320,6 +347,7 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
         Info: root,
         Fs: c.fs,
         IgnoreFile: c.h.SourceSpec.IgnoreFile,
+        PathParser: c.h.Conf.PathParser(),
         HookPre: preHook,
         HookPost: postHook,
         WalkFn: wfn,
@@ -370,6 +398,7 @@ func (c *pagesCollector) handleBundleLeaf(dir, bundle hugofs.FileMetaInfo, inPat
         Info: dir,
         DirEntries: readdir,
         IgnoreFile: c.h.SourceSpec.IgnoreFile,
+        PathParser: c.h.Conf.PathParser(),
         WalkFn: walk,
     })