mirror of https://github.com/gohugoio/hugo.git (synced 2025-08-27 22:09:53 +02:00)
adding hugo
hugolib/config.go  (Normal file, 143 lines added)
@@ -0,0 +1,143 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"path/filepath"
)

// config file items
type Config struct {
	SourceDir, PublishDir, BaseUrl, StaticDir string
	Path, CacheDir, LayoutDir, DefaultLayout  string
	Indexes                                   map[string]string // singular, plural
	ProcessFilters                            map[string][]string
	BuildDrafts                               bool
}

var c Config

// Read cfgfile or setup defaults.
func SetupConfig(cfgfile *string, path *string) *Config {
	c.setPath(*path)

	configPath, err := c.findConfigFile(*cfgfile)

	if err != nil {
		fmt.Printf("%v", err)
		fmt.Println(" using defaults instead")
	}

	// set defaults

	c.SourceDir = "content"
	c.LayoutDir = "layouts"
	c.PublishDir = "public"
	c.StaticDir = "static"
	c.DefaultLayout = "post"
	c.BuildDrafts = false

	file, err := ioutil.ReadFile(configPath)
	if err == nil {
		if err := json.Unmarshal(file, &c); err != nil {
			fmt.Printf("Error parsing config: %s", err)
			os.Exit(1)
		}
	}

	// set index defaults if none provided
	if len(c.Indexes) == 0 {
		c.Indexes = make(map[string]string)
		c.Indexes["tag"] = "tags"
		c.Indexes["category"] = "categories"
	}
	return &c
}

func (c *Config) setPath(p string) {
	if p == "" {
		path, err := FindPath()
		if err != nil {
			fmt.Printf("Error finding path: %s", err)
		}
		c.Path = path
	} else {
		path, err := filepath.Abs(p)
		if err != nil {
			fmt.Printf("Error finding path: %s", err)
		}
		c.Path = path
	}
}

func (c *Config) GetPath() string {
	if c.Path == "" {
		c.setPath("")
	}
	return c.Path
}

func FindPath() (string, error) {
	serverFile, err := filepath.Abs(os.Args[0])

	if err != nil {
		return "", fmt.Errorf("Can't get absolute path for executable: %v", err)
	}

	path := filepath.Dir(serverFile)
	realFile, err := filepath.EvalSymlinks(serverFile)

	if err != nil {
		if _, err = os.Stat(serverFile + ".exe"); err == nil {
			realFile = filepath.Clean(serverFile + ".exe")
		}
	}

	if err == nil && realFile != serverFile {
		path = filepath.Dir(realFile)
	}

	return path, nil
}

func (c *Config) GetAbsPath(name string) string {
	if path.IsAbs(name) {
		return name
	}

	p := filepath.Join(c.GetPath(), name)
	return p
}

func (c *Config) findConfigFile(configFileName string) (string, error) {
	// If the full path is given, just use that
	if path.IsAbs(configFileName) {
		return configFileName, nil
	}

	// Else check the local directory
	t := c.GetAbsPath(configFileName)
	if b, _ := exists(t); b {
		return t, nil
	} else {
		return "", fmt.Errorf("config file not found at: %s", t)
	}

	return "", nil // Unreachable; looking forward to Go 1.1, which no longer requires this final return
}
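
For orientation, a minimal sketch of how a caller might wire SetupConfig together (the main package, flag names, and import path are illustrative assumptions, not part of this commit):

package main

import (
	"flag"

	"github.com/spf13/hugo/hugolib" // import path assumed for illustration
)

func main() {
	// Hypothetical flags; SetupConfig takes pointers so flag values can be passed directly.
	cfgfile := flag.String("config", "config.json", "config file (defaults are used if it is missing)")
	path := flag.String("path", "", "project path; empty means derive it from the executable location")
	flag.Parse()

	config := hugolib.SetupConfig(cfgfile, path)
	_ = config.GetAbsPath(config.SourceDir) // e.g. <project>/content
}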
hugolib/helpers.go  (Normal file, 309 lines added)
@@ -0,0 +1,309 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"
	"fmt"
	"github.com/kr/pretty"
	"os"
	"reflect"
	"regexp"
	"strconv"
	"strings"
	"time"
)

var sanitizeRegexp = regexp.MustCompile("[^a-zA-Z0-9/_-]")

// TODO: Make these wrappers private
// Wrapper around Fprintf taking verbose flag in account.
func Printvf(format string, a ...interface{}) {
	//if *verbose {
	fmt.Fprintf(os.Stderr, format, a...)
	//}
}

func Printer(x interface{}) {
	fmt.Printf("%#v", pretty.Formatter(x))
	fmt.Println("")
}

// Wrapper around Fprintln taking verbose flag in account.
func Printvln(a ...interface{}) {
	//if *verbose {
	fmt.Fprintln(os.Stderr, a...)
	//}
}

func FatalErr(str string) {
	fmt.Println(str)
	os.Exit(1)
}

func PrintErr(str string, a ...interface{}) {
	fmt.Fprintln(os.Stderr, str, a)
}

func Error(str string, a ...interface{}) {
	fmt.Fprintln(os.Stderr, str, a)
}

func interfaceToStringToDate(i interface{}) time.Time {
	s := interfaceToString(i)
	d, e := time.Parse("02 Jan 06 15:04 MST", s)

	if e != nil {
		d, e = time.Parse("2006-01-02", s)
	}

	if e != nil {
		d, e = time.Parse("02 Jan 06", s)
	}

	return d

}

func interfaceToBool(i interface{}) bool {
	switch b := i.(type) {
	case bool:
		return b
	default:
		Error("Only Boolean values are supported for this JSON key")
	}

	return false

}

func interfaceArrayToStringArray(i interface{}) []string {
	var a []string

	switch vv := i.(type) {
	case []interface{}:
		for _, u := range vv {
			a = append(a, interfaceToString(u))
		}
	}

	return a
}

func interfaceToString(i interface{}) string {
	switch s := i.(type) {
	case string:
		return s
	default:
		Error("Only Strings are supported for this JSON key")
	}

	return ""
}

// Check if Exists && is Directory
func dirExists(path string) (bool, error) {
	fi, err := os.Stat(path)
	if err == nil && fi.IsDir() {
		return true, nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}

// Check if File / Directory Exists
func exists(path string) (bool, error) {
	_, err := os.Stat(path)
	if err == nil {
		return true, nil
	}
	if os.IsNotExist(err) {
		return false, nil
	}
	return false, err
}

func mkdirIf(path string) {
	err := os.Mkdir(path, 0777)
	// Report any error other than "already exists".
	if err != nil && !os.IsExist(err) {
		fmt.Println(err)
	}
}

func Urlize(url string) string {
	return Sanitize(strings.ToLower(strings.Replace(strings.TrimSpace(url), " ", "-", -1)))
}

func Gt(a interface{}, b interface{}) bool {
	var left, right int64
	av := reflect.ValueOf(a)

	switch av.Kind() {
	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
		left = int64(av.Len())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		left = av.Int()
	case reflect.String:
		left, _ = strconv.ParseInt(av.String(), 10, 64)
	}

	bv := reflect.ValueOf(b)

	switch bv.Kind() {
	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
		right = int64(bv.Len())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		right = bv.Int()
	case reflect.String:
		right, _ = strconv.ParseInt(bv.String(), 10, 64)
	}

	return left > right
}

func IsSet(a interface{}, key interface{}) bool {
	av := reflect.ValueOf(a)
	kv := reflect.ValueOf(key)

	switch av.Kind() {
	case reflect.Array, reflect.Chan, reflect.Slice:
		if int64(av.Len()) > kv.Int() {
			return true
		}
	case reflect.Map:
		if kv.Type() == av.Type().Key() {
			return av.MapIndex(kv).IsValid()
		}
	}

	return false
}

func ReturnWhenSet(a interface{}, index int) interface{} {
	av := reflect.ValueOf(a)

	switch av.Kind() {
	case reflect.Array, reflect.Slice:
		if av.Len() > index {

			avv := av.Index(index)
			switch avv.Kind() {
			case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
				return avv.Int()
			case reflect.String:
				return avv.String()
			}
		}
	}

	return ""
}

func Sanitize(s string) string {
	return sanitizeRegexp.ReplaceAllString(s, "")
}

func fileExt(path string) (file, ext string) {
	if strings.Contains(path, ".") {
		i := len(path) - 1
		for path[i] != '.' {
			i--
		}
		return path[:i], path[i+1:]
	}
	return path, ""
}

func replaceExtension(path string, newExt string) string {
	f, _ := fileExt(path)
	return f + "." + newExt
}

func TotalWords(s string) int {
	return len(strings.Fields(s))
}

func WordCount(s string) map[string]int {
	m := make(map[string]int)
	for _, f := range strings.Fields(s) {
		m[f] += 1
	}

	return m
}

func StripHTML(s string) string {
	output := ""

	// Shortcut strings with no tags in them
	if !strings.ContainsAny(s, "<>") {
		output = s
	} else {
		s = strings.Replace(s, "\n", " ", -1)
		s = strings.Replace(s, "</p>", " \n", -1)
		s = strings.Replace(s, "<br>", " \n", -1)
		s = strings.Replace(s, "</br>", " \n", -1)

		// Walk through the string removing all tags
		b := new(bytes.Buffer)
		inTag := false
		for _, r := range s {
			switch r {
			case '<':
				inTag = true
			case '>':
				inTag = false
			default:
				if !inTag {
					b.WriteRune(r)
				}
			}
		}
		output = b.String()
	}
	return output
}

func TruncateWords(s string, max int) string {
	words := strings.Fields(s)
	if max > len(words) {
		return strings.Join(words, " ")
	}

	return strings.Join(words[:max], " ")
}

func TruncateWordsToWholeSentence(s string, max int) string {
	words := strings.Fields(s)
	if max > len(words) {
		return strings.Join(words, " ")
	}

	for counter, word := range words[max:] {
		if strings.HasSuffix(word, ".") ||
			strings.HasSuffix(word, "?") ||
			strings.HasSuffix(word, ".\"") ||
			strings.HasSuffix(word, "!") {
			return strings.Join(words[:max+counter+1], " ")
		}
	}

	return strings.Join(words[:max], " ")
}

func MakePermalink(domain string, path string) string {
	return strings.TrimRight(domain, "/") + "/" + strings.TrimLeft(path, "/")
}
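
A few of these helpers in action; this is a standalone sketch (not part of the commit, import path assumed) with the expected results noted in comments:

package main

import (
	"fmt"

	"github.com/spf13/hugo/hugolib" // import path assumed for illustration
)

func main() {
	fmt.Println(hugolib.Urlize("Hello, Wörld!"))               // "hello-wrld": lowercased, spaces become "-", other runes stripped
	fmt.Println(hugolib.TruncateWords("one two three", 2))     // "one two"
	fmt.Println(hugolib.StripHTML("<p>Hi <b>there</b></p>"))   // "Hi there \n": closing </p> becomes " \n", tags are dropped
	fmt.Println(hugolib.MakePermalink("http://example.com/", "/posts/a.html")) // "http://example.com/posts/a.html"
}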
hugolib/index.go  (Normal file, 58 lines added)
@@ -0,0 +1,58 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"sort"
)

type Index map[string]Pages
type IndexList map[string]Index

type OrderedIndex []*Pages
type OrderedIndexList map[string]OrderedIndex

// KeyPrep... Indexes should be case insensitive. Can make it easily conditional later.
func kp(in string) string {
	return Urlize(in)
}

func (i Index) Get(key string) Pages { return i[kp(key)] }
func (i Index) Count(key string) int { return len(i[kp(key)]) }
func (i Index) Add(key string, p *Page) {
	key = kp(key)
	i[key] = append(i[key], p)
}

func (l IndexList) BuildOrderedIndexList() *OrderedIndexList {
	oil := make(OrderedIndexList, len(l))
	for idx_name, index := range l {
		i := 0
		oi := make(OrderedIndex, len(index))
		for _, e := range index {
			e := e // copy: taking the address of the range variable directly would make every entry point at the same value
			oi[i] = &e
			i++
		}
		oi.Sort()
		oil[idx_name] = oi
	}
	return &oil
}

func (idx OrderedIndex) Len() int { return len(idx) }

func (idx OrderedIndex) Less(i, j int) bool       { return len(*idx[i]) < len(*idx[j]) }
func (idx OrderedIndex) Swap(i, j int)            { idx[i], idx[j] = idx[j], idx[i] }
func (idx OrderedIndex) Sort()                    { sort.Sort(idx) }
func (idx OrderedIndex) Limit(n int) OrderedIndex { return idx[0:n] }
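
A hypothetical test (not part of the commit) showing that Index keys are normalized through kp()/Urlize, so lookups are effectively case and space insensitive:

package hugolib

import "testing"

func TestIndexKeyNormalization(t *testing.T) {
	idx := make(Index)
	idx.Add("Go Templates", new(Page)) // stored under the key "go-templates"

	if idx.Count("go templates") != 1 {
		t.Fatal("expected lookup under the normalized key to find the page")
	}
}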
hugolib/node.go  (Normal file, 43 lines added)
@@ -0,0 +1,43 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"html/template"
	"time"
)

type Node struct {
	Url         string
	Permalink   template.HTML
	RSSlink     template.HTML
	Site        SiteInfo
	layout      string
	Data        map[string]interface{}
	Section     string
	Slug        string
	Title       string
	Description string
	Keywords    []string
	Date        time.Time
}

func (n *Node) GetSection() string {
	s := ""
	if n.Section != "" {
		s = n.Section
	}

	return s
}
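
Since Page embeds Node, these exported fields are what the layout templates see. A hypothetical layouts/post/single.html (not part of this commit), the name Page.Layout() resolves to by default, could look like:

<article>
  <h1><a href="{{ .Permalink }}">{{ .Title }}</a></h1>
  <time>{{ .Date }}</time>
  {{ .Content }}
</article>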
hugolib/page.go  (Normal file, 381 lines added)
@@ -0,0 +1,381 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"
	"encoding/json"
	"fmt"
	"github.com/theplant/blackfriday"
	"html/template"
	"io/ioutil"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"time"
)

var _ = filepath.Base("")

type Page struct {
	Status          string
	Images          []string
	Content         template.HTML
	Summary         template.HTML
	RawMarkdown     string // TODO should be []byte
	Params          map[string]interface{}
	RenderedContent *bytes.Buffer
	contentType     string
	Draft           bool
	Tmpl            *template.Template
	PageMeta
	File
	Position
	Node
}

const summaryLength = 70

type File struct {
	FileName, OutFile, Extension string
}

type PageMeta struct {
	WordCount      int
	FuzzyWordCount int
}

type Position struct {
	Prev *Page
	Next *Page
}

type Pages []*Page

func (p Pages) Len() int           { return len(p) }
func (p Pages) Less(i, j int) bool { return p[i].Date.Unix() > p[j].Date.Unix() }
func (p Pages) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// TODO eliminate unnecessary things
func (p Pages) Sort()             { sort.Sort(p) }
func (p Pages) Limit(n int) Pages { return p[0:n] }

func initializePage(filename string) (page Page) {
	page = Page{}
	page.Date, _ = time.Parse("20060102", "20080101")
	page.FileName = filename
	page.contentType = ""
	page.Extension = "html"
	page.Params = make(map[string]interface{})
	page.Keywords = make([]string, 10, 30)
	page.setSection()

	return page
}

func (p *Page) setSection() {
	x := strings.Split(p.FileName, "/")

	if section := x[len(x)-2]; section != "content" {
		p.Section = section
	}
}

func (page *Page) Type() string {
	if page.contentType != "" {
		return page.contentType
	}

	if x := page.GetSection(); x != "" {
		return x
	}

	return "page"
}

func (page *Page) Layout(l ...string) string {
	layout := ""
	if len(l) == 0 {
		layout = "single"
	} else {
		layout = l[0]
	}

	if x := page.layout; x != "" {
		return x
	}

	return strings.ToLower(page.Type()) + "/" + layout + ".html"
}

// TODO should return errors as well
// TODO new page should return just a page
// TODO initalize separately... load from reader (file, or []byte)
func NewPage(filename string) *Page {
	p := initializePage(filename)
	if err := p.buildPageFromFile(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}

	p.analyzePage()

	return &p
}

func (p *Page) analyzePage() {
	p.WordCount = TotalWords(p.RawMarkdown)
	p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
}

// TODO //rewrite to use byte methods instead
func (page *Page) parseJsonMetaData(data []byte) ([]string, error) {
	var err error

	lines := strings.Split(string(data), "\n")
	datum := lines[0:]

	// go through content parse between "{" and "}"
	// must be on their own lines (for now)
	var found = 0
	for i, line := range lines {
		line = strings.TrimSpace(line)

		if line == "{" {
			found += 1
		}

		if line == "}" {
			found -= 1
		}

		if found == 0 {
			datum = lines[0 : i+1]
			lines = lines[i+1:]
			break
		}
	}

	err = page.handleJsonMetaData([]byte(strings.Join(datum, "\n")))

	return lines, err
}

func (p *Page) Permalink() template.HTML {
	if len(strings.TrimSpace(p.Slug)) > 0 {
		return template.HTML(MakePermalink(string(p.Site.BaseUrl), strings.TrimSpace(p.Section)+"/"+p.Slug))
	} else if len(strings.TrimSpace(p.Url)) > 2 {
		return template.HTML(MakePermalink(string(p.Site.BaseUrl), strings.TrimSpace(p.Url)))
	} else {
		_, t := filepath.Split(p.FileName)
		x := replaceExtension(strings.TrimSpace(t), p.Extension)
		return template.HTML(MakePermalink(string(p.Site.BaseUrl), strings.TrimSpace(p.Section)+"/"+x))
	}
}

func (page *Page) handleJsonMetaData(datum []byte) error {
	var f interface{}
	if err := json.Unmarshal(datum, &f); err != nil {
		return fmt.Errorf("Invalid JSON in %v\nError parsing page meta data: %s", page.FileName, err)
	}

	m := f.(map[string]interface{})

	for k, v := range m {
		switch strings.ToLower(k) {
		case "title":
			page.Title = interfaceToString(v)
		case "description":
			page.Description = interfaceToString(v)
		case "slug":
			page.Slug = Urlize(interfaceToString(v))
		case "url":
			if url := interfaceToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
				return fmt.Errorf("Only relative urls are supported, %v provided", url)
			}
			page.Url = Urlize(interfaceToString(v))
		case "type":
			page.contentType = interfaceToString(v)
		case "keywords":
			page.Keywords = interfaceArrayToStringArray(v)
		case "date", "pubdate":
			page.Date = interfaceToStringToDate(v)
		case "draft":
			page.Draft = interfaceToBool(v)
		case "layout":
			page.layout = interfaceToString(v)
		case "status":
			page.Status = interfaceToString(v)
		default:
			// If not one of the explicit values, store in Params
			//fmt.Println(strings.ToLower(k))
			switch vv := v.(type) {
			case string: // handle string values
				page.Params[strings.ToLower(k)] = vv
			default: // handle array of strings as well
				switch vvv := vv.(type) {
				case []interface{}:
					var a = make([]string, len(vvv))
					for i, u := range vvv {
						a[i] = interfaceToString(u)
					}
					page.Params[strings.ToLower(k)] = a
				}
			}
		}
	}
	//Printer(page.Params)
	return nil
}

func (page *Page) GetParam(key string) interface{} {
	v := page.Params[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch v.(type) {
	case string:
		return interfaceToString(v)
	case []string:
		return v
	}
	return nil
}

func (page *Page) parseFileMetaData(data []byte) ([]string, error) {
	lines := strings.Split(string(data), "\n")

	// go through content parse from --- to ---
	var found = 0
	for i, line := range lines {
		line = strings.TrimSpace(line)

		if found == 1 {
			// parse line for param
			colonIndex := strings.Index(line, ":")
			if colonIndex > 0 {
				key := strings.TrimSpace(line[:colonIndex])
				value := strings.TrimSpace(line[colonIndex+1:])
				value = strings.Trim(value, "\"") //remove quotes
				switch key {
				case "title":
					page.Title = value
				case "layout":
					page.layout = value
				case "extension":
					page.Extension = "." + value
				default:
					page.Params[key] = value
				}
			}

		} else if found >= 2 {
			// params over
			lines = lines[i:]
			break
		}

		if line == "---" {
			found += 1
		}
	}

	return lines, nil
}

func (page *Page) Err(message string) {
	fmt.Println(page.FileName + " : " + message)
}

// TODO return error on last line instead of nil
func (page *Page) parseFileHeading(data []byte) ([]string, error) {
	if len(data) == 0 {
		page.Err("Empty File, skipping")
	} else {
		if data[0] == '-' {
			return page.parseFileMetaData(data)
		}
		return page.parseJsonMetaData(data)
	}
	return nil, nil
}

func (p *Page) Render(layout ...string) template.HTML {
	curLayout := ""

	if len(layout) > 0 {
		curLayout = layout[0]
	}

	return template.HTML(string(p.ExecuteTemplate(curLayout).Bytes()))
}

func (p *Page) ExecuteTemplate(layout string) *bytes.Buffer {
	l := p.Layout(layout)
	buffer := new(bytes.Buffer)
	p.Tmpl.ExecuteTemplate(buffer, l, p)
	return buffer
}

func (page *Page) readFile() []byte {
	var data, err = ioutil.ReadFile(page.FileName)
	if err != nil {
		PrintErr("Error Reading: " + page.FileName)
		return nil
	}
	return data
}

func (page *Page) buildPageFromFile() error {
	data := page.readFile()

	content, err := page.parseFileHeading(data)
	if err != nil {
		return err
	}

	if err := page.setOutFile(); err != nil {
		return err
	}

	page.convertMarkdown(content)
	return nil
}

func (p *Page) setOutFile() error {
	if len(strings.TrimSpace(p.Slug)) > 0 {
		// Use Slug if provided
		p.OutFile = strings.TrimSpace(p.Slug + "." + p.Extension)
	} else if len(strings.TrimSpace(p.Url)) > 2 {
		// Use Url if provided & Slug missing
		p.OutFile = strings.TrimSpace(p.Url)
	} else {
		// Fall back to filename
		_, t := filepath.Split(p.FileName)
		p.OutFile = replaceExtension(strings.TrimSpace(t), p.Extension)
	}

	return nil
}

func (page *Page) convertMarkdown(lines []string) {

	page.RawMarkdown = strings.Join(lines, "\n")
	content := string(blackfriday.MarkdownCommon([]byte(page.RawMarkdown)))
	page.Content = template.HTML(content)
	page.Summary = template.HTML(TruncateWordsToWholeSentence(StripHTML(StripShortcodes(content)), summaryLength))
}
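
For reference, a content file that parseJsonMetaData accepts opens with a JSON block whose braces sit on their own lines, followed by the markdown body; this sample is illustrative, not part of the commit:

{
    "title": "First Post",
    "slug": "first-post",
    "tags": ["go", "hugo"],
    "date": "2013-07-04",
    "draft": false
}
The markdown body starts here; blackfriday renders it into .Content, and a
sentence-aligned excerpt of roughly summaryLength words becomes .Summary.

Recognized keys set the matching Page fields; anything else ("tags" above) lands in Params, which is what BuildSiteMeta later reads via GetParam.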
hugolib/shortcode.go  (Normal file, 131 lines added)
@@ -0,0 +1,131 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"
	"fmt"
	"html/template"
	"strings"
	"unicode"
)

var _ = fmt.Println

type ShortcodeFunc func([]string) string

type Shortcode struct {
	Name string
	Func ShortcodeFunc
}

type ShortcodeWithPage struct {
	Params interface{}
	Page   *Page
}

type Shortcodes map[string]ShortcodeFunc

func ShortcodesHandle(stringToParse string, p *Page, t *template.Template) string {
	posStart := strings.Index(stringToParse, "{{%")
	if posStart > 0 {
		posEnd := strings.Index(stringToParse[posStart:], "%}}") + posStart
		if posEnd > posStart {
			name, par := SplitParams(stringToParse[posStart+3 : posEnd])
			params := Tokenize(par)
			var data = &ShortcodeWithPage{Params: params, Page: p}
			newString := stringToParse[:posStart] + ShortcodeRender(name, data, t) + ShortcodesHandle(stringToParse[posEnd+3:], p, t)
			return newString
		}
	}
	return stringToParse
}

func StripShortcodes(stringToParse string) string {
	posStart := strings.Index(stringToParse, "{{%")
	if posStart > 0 {
		posEnd := strings.Index(stringToParse[posStart:], "%}}") + posStart
		if posEnd > posStart {
			newString := stringToParse[:posStart] + StripShortcodes(stringToParse[posEnd+3:])
			return newString
		}
	}
	return stringToParse
}

func Tokenize(in string) interface{} {
	first := strings.Fields(in)
	var final = make([]string, 0)
	var keys = make([]string, 0)
	inQuote := false
	start := 0

	for i, v := range first {
		index := strings.Index(v, "=")

		if !inQuote {
			if index > 1 {
				keys = append(keys, v[:index])
				v = v[index+1:]
			}
		}

		// Quotes arrive as the HTML entities "&ldquo;" / "&rdquo;" (7 bytes each)
		// because markdown rendering happens first, hence the slicing by 7 below.
		if !strings.HasPrefix(v, "&ldquo;") && !inQuote {
			final = append(final, v)
		} else if inQuote && strings.HasSuffix(v, "&rdquo;") && !strings.HasSuffix(v, "\\\"") {
			first[i] = v[:len(v)-7]
			final = append(final, strings.Join(first[start:i+1], " "))
			inQuote = false
		} else if strings.HasPrefix(v, "&ldquo;") && !inQuote {
			if strings.HasSuffix(v, "&rdquo;") {
				final = append(final, v[7:len(v)-7])
			} else {
				start = i
				first[i] = v[7:]
				inQuote = true
			}
		}

		// No closing "... just make remainder the final token
		if inQuote && i == len(first) {
			final = append(final, first[start:len(first)]...)
		}
	}

	if len(keys) > 0 {
		var m = make(map[string]string)
		for i, k := range keys {
			m[k] = final[i]
		}

		return m
	}

	return final
}

func SplitParams(in string) (name string, par2 string) {
	i := strings.IndexFunc(strings.TrimSpace(in), unicode.IsSpace)
	if i < 1 {
		return strings.TrimSpace(in), ""
	}

	return strings.TrimSpace(in[:i+1]), strings.TrimSpace(in[i+1:])
}

func ShortcodeRender(name string, data *ShortcodeWithPage, t *template.Template) string {
	buffer := new(bytes.Buffer)
	t.ExecuteTemplate(buffer, "shortcodes/"+name+".html", data)
	return buffer.String()
}
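
To make the flow concrete, assume a post contains the hypothetical shortcode call below (neither the content nor the template is part of this commit). ShortcodesHandle locates the "{{%" ... "%}}" delimiters, SplitParams yields the name "img" and the raw parameter string, Tokenize turns it into a []string, and ShortcodeRender executes the template registered as shortcodes/img.html:

content/post/example.md:
A diagram: {{% img /images/flow.png %}} shows the build steps.

layouts/shortcodes/img.html (hypothetical):
<img src="{{ index .Params 0 }}">

Positional parameters reach the template as a []string in .Params; key=value parameters would instead produce a map[string]string, per Tokenize above.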
hugolib/site.go  (Normal file, 362 lines added)
@@ -0,0 +1,362 @@
// Copyright © 2013 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bitbucket.org/pkg/inflect"
	"bytes"
	"fmt"
	"github.com/spf13/nitro"
	"html/template"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"time"
	//"sync"
)

type Site struct {
	c           Config
	Pages       Pages
	Tmpl        *template.Template
	Indexes     IndexList
	Files       []string
	Directories []string
	Sections    Index
	Info        SiteInfo
	Shortcodes  map[string]ShortcodeFunc
	timer       *nitro.B
}

type SiteInfo struct {
	BaseUrl    template.URL
	Indexes    *OrderedIndexList
	Recent     *Pages
	LastChange time.Time
}

func (s *Site) getFromIndex(kind string, name string) Pages {
	return s.Indexes[kind][name]
}

func NewSite(config *Config) *Site {
	return &Site{c: *config, timer: nitro.Initalize()}
}

func (site *Site) Build() {
	site.Process()
	site.Render()
	site.Write()
}

func (site *Site) Analyze() {
	site.Process()
	site.checkDescriptions()
}

func (site *Site) Process() {
	site.initialize()
	site.prepTemplates()
	site.timer.Step("initialize & template prep")
	site.CreatePages()
	site.timer.Step("import pages")
	site.BuildSiteMeta()
	site.timer.Step("build indexes")
}

func (site *Site) Render() {
	site.RenderIndexes()
	site.timer.Step("render and write indexes")
	site.RenderLists()
	site.timer.Step("render and write lists")
	site.RenderPages()
	site.timer.Step("render pages")
	site.ProcessShortcodes()
	site.timer.Step("render shortcodes")
	site.RenderHomePage()
	site.timer.Step("render and write homepage")
}

func (site *Site) Write() {
	site.WritePages()
	site.timer.Step("write pages")
}

func (site *Site) checkDescriptions() {
	for _, p := range site.Pages {
		if len(p.Description) < 60 {
			fmt.Print(p.FileName + " ")
		}
	}
}

func (s *Site) prepTemplates() {
	var templates = template.New("")

	funcMap := template.FuncMap{
		"urlize":    Urlize,
		"gt":        Gt,
		"isset":     IsSet,
		"echoParam": ReturnWhenSet,
	}

	templates.Funcs(funcMap)

	walker := func(path string, fi os.FileInfo, err error) error {
		if err != nil {
			PrintErr("Walker: ", err)
			return nil
		}

		if !fi.IsDir() {
			filetext, err := ioutil.ReadFile(path)
			if err != nil {
				return err
			}
			text := string(filetext)
			name := path[len(s.c.GetAbsPath(s.c.LayoutDir))+1:]
			t := templates.New(name)
			template.Must(t.Parse(text))
		}
		return nil
	}

	filepath.Walk(s.c.GetAbsPath(s.c.LayoutDir), walker)

	s.Tmpl = templates
}

func (s *Site) initialize() {
	site := s

	s.checkDirectories()

	walker := func(path string, fi os.FileInfo, err error) error {
		if err != nil {
			PrintErr("Walker: ", err)
			return nil
		}

		if fi.IsDir() {
			site.Directories = append(site.Directories, path)
			return nil
		} else {
			site.Files = append(site.Files, path)
			return nil
		}
		return nil
	}

	filepath.Walk(s.c.GetAbsPath(s.c.SourceDir), walker)

	s.Info = SiteInfo{BaseUrl: template.URL(s.c.BaseUrl)}

	s.Shortcodes = make(map[string]ShortcodeFunc)
}

func (s *Site) checkDirectories() {
	if b, _ := dirExists(s.c.GetAbsPath(s.c.LayoutDir)); !b {
		FatalErr("No layout directory found, expecting to find it at " + s.c.GetAbsPath(s.c.LayoutDir))
	}
	if b, _ := dirExists(s.c.GetAbsPath(s.c.SourceDir)); !b {
		FatalErr("No source directory found, expecting to find it at " + s.c.GetAbsPath(s.c.SourceDir))
	}
	mkdirIf(s.c.GetAbsPath(s.c.PublishDir))
}

func (s *Site) ProcessShortcodes() {
	for i, _ := range s.Pages {
		var bb bytes.Buffer
		bb.WriteString(ShortcodesHandle(s.Pages[i].RenderedContent.String(), s.Pages[i], s.Tmpl))
		s.Pages[i].RenderedContent = &bb
	}
}

func (s *Site) CreatePages() {
	for _, fileName := range s.Files {
		page := NewPage(fileName)
		page.Site = s.Info
		page.Tmpl = s.Tmpl
		if s.c.BuildDrafts || !page.Draft {
			s.Pages = append(s.Pages, page)
		}
	}

	s.Pages.Sort()
}

func (s *Site) BuildSiteMeta() {
	s.Indexes = make(IndexList)
	s.Sections = make(Index)

	for _, plural := range s.c.Indexes {
		s.Indexes[plural] = make(Index)
		for i, p := range s.Pages {
			vals := p.GetParam(plural)

			if vals != nil {
				for _, idx := range vals.([]string) {
					s.Indexes[plural].Add(idx, s.Pages[i])
				}
			}
		}
		for k, _ := range s.Indexes[plural] {
			s.Indexes[plural][k].Sort()
		}
	}

	for i, p := range s.Pages {
		sect := p.GetSection()
		s.Sections.Add(sect, s.Pages[i])
	}

	for k, _ := range s.Sections {
		s.Sections[k].Sort()
	}

	s.Info.Indexes = s.Indexes.BuildOrderedIndexList()

	s.Info.LastChange = s.Pages[0].Date
}

func (s *Site) RenderPages() {
	for i, _ := range s.Pages {
		s.Pages[i].RenderedContent = s.RenderThing(s.Pages[i], s.Pages[i].Layout())
	}
}

func (s *Site) WritePages() {
	for _, p := range s.Pages {
		s.WritePublic(p.Section, p.OutFile, p.RenderedContent.Bytes())
	}
}

func (s *Site) RenderIndexes() {
	for singular, plural := range s.c.Indexes {
		for k, o := range s.Indexes[plural] {
			n := s.NewNode()
			n.Title = strings.Title(k)
			url := Urlize(plural + "/" + k)
			n.Url = url + ".html"
			n.Permalink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(n.Url)))
			n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(url+".xml")))
			n.Date = o[0].Date
			n.Data[singular] = o
			n.Data["Pages"] = o
			layout := "indexes/" + singular + ".html"

			x := s.RenderThing(n, layout)
			s.WritePublic(plural, k+".html", x.Bytes())

			if a := s.Tmpl.Lookup("rss.xml"); a != nil {
				// XML Feed
				y := s.NewXMLBuffer()
				n.Url = Urlize(plural + "/" + k + ".xml")
				s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
				s.WritePublic(plural, k+".xml", y.Bytes())
			}
		}
	}
}

func (s *Site) RenderLists() {
	for section, data := range s.Sections {
		n := s.NewNode()
		n.Title = strings.Title(inflect.Pluralize(section))
		n.Url = Urlize(section + "/index.html")
		n.Permalink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(n.Url)))
		n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(section+"/index.xml")))
		n.Date = data[0].Date
		n.Data["Pages"] = data
		layout := "indexes/" + section + ".html"

		x := s.RenderThing(n, layout)
		s.WritePublic(section, "index.html", x.Bytes())

		if a := s.Tmpl.Lookup("rss.xml"); a != nil {
			// XML Feed
			n.Url = Urlize(section + "/index.xml")
			y := s.NewXMLBuffer()
			s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
			s.WritePublic(section, "index.xml", y.Bytes())
		}
	}
}

func (s *Site) RenderHomePage() {
	n := s.NewNode()
	n.Title = ""
	n.Url = Urlize(string(n.Site.BaseUrl))
	n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string("/index.xml")))
	n.Permalink = template.HTML(string(n.Site.BaseUrl))
	n.Date = s.Pages[0].Date
	if len(s.Pages) < 9 {
		n.Data["Pages"] = s.Pages
	} else {
		n.Data["Pages"] = s.Pages[:9]
	}
	x := s.RenderThing(n, "index.html")
	s.WritePublic("", "index.html", x.Bytes())

	if a := s.Tmpl.Lookup("rss.xml"); a != nil {
		// XML Feed
		n.Url = Urlize("index.xml")
		y := s.NewXMLBuffer()
		s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
		s.WritePublic("", "index.xml", y.Bytes())
	}
}

func (s *Site) Stats() {
	fmt.Printf("%d pages created \n", len(s.Pages))
	for _, pl := range s.c.Indexes {
		fmt.Printf("%d %s created\n", len(s.Indexes[pl]), pl)
	}
}

func (s *Site) NewNode() Node {
	var y Node
	y.Data = make(map[string]interface{})
	y.Site = s.Info

	return y
}

func (s *Site) RenderThing(d interface{}, layout string) *bytes.Buffer {
	buffer := new(bytes.Buffer)
	s.Tmpl.ExecuteTemplate(buffer, layout, d)
	return buffer
}

func (s *Site) NewXMLBuffer() *bytes.Buffer {
	header := "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n"
	return bytes.NewBufferString(header)
}

func (s *Site) WritePublic(path string, filename string, content []byte) {
	AbsPath := ""
	if path != "" {
		// TODO double check the following line.. calling GetAbsPath 2x seems wrong
		mkdirIf(s.c.GetAbsPath(filepath.Join(s.c.GetAbsPath(s.c.PublishDir), path)))
		AbsPath = filepath.Join(s.c.GetAbsPath(s.c.PublishDir), path, filename)
	} else {
		AbsPath = filepath.Join(s.c.GetAbsPath(s.c.PublishDir), filename)
	}

	file, _ := os.Create(AbsPath)
	defer file.Close()

	file.Write(content)
}
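
Putting it together, a hypothetical driver (not part of this commit) would build a site like this; SetupConfig, NewSite, Build, and Stats are the functions defined above, while the main package and import path are assumed:

package main

import "github.com/spf13/hugo/hugolib" // import path assumed for illustration

func main() {
	cfgfile, path := "config.json", ""
	config := hugolib.SetupConfig(&cfgfile, &path)

	site := hugolib.NewSite(config)
	site.Build() // Process (read content, build meta), Render, then Write into PublishDir
	site.Stats()
}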