More initialism corrections (golint)

Thanks to @bep's new, brilliant helpers.Deprecated() function,
the following functions and variables have been transitioned to
their new names, preserving backward compatibility in v0.14
while warning users that the old names will be removed in v0.15:

 * .Url → .URL (for node, menu and paginator)
 * .Site.BaseUrl → .Site.BaseURL
 * .Site.Indexes → .Site.Taxonomies
 * .Site.Recent → .Site.Pages
 * getJson → getJSON
 * getCsv → getCSV
 * safeHtml → safeHTML
 * safeCss → safeCSS
 * safeUrl → safeURL

Also fix related initialisms in strings and comments.

Continued effort in fixing #959.
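
The helpers.Deprecated() implementation itself is not part of this
diff; only its call sites appear below. As a rough sketch of the shape
those call sites imply (the logging mechanism here is illustrative,
not Hugo's actual code):

    // Sketch only: the real Deprecated lives in Hugo's helpers package
    // and may log through a different facility.
    package helpers

    import "log"

    // Deprecated warns that item on object is deprecated in favour of
    // alternative, e.g. Deprecated("Site", ".BaseUrl", ".BaseURL").
    func Deprecated(object, item, alternative string) {
        log.Printf("%s's %s is deprecated and will be removed in Hugo 0.15. Use %s instead.",
            object, item, alternative)
    }

Funnelling every old name through one helper keeps the warning text,
and the eventual removal in v0.15, in a single place.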
Author:    Anthony Fok
Date:      2015-03-17 22:16:54 -07:00
Committer: bep
Parent:    ca69cad8aa
Commit:    8b8fb417ae
Changes:   20 changed files with 184 additions and 129 deletions


@@ -93,15 +93,13 @@ type targetList struct {
 }
 
 type SiteInfo struct {
-	BaseUrl template.URL
+	BaseURL template.URL
 	Taxonomies TaxonomyList
 	Authors AuthorList
 	Social SiteSocial
-	Indexes *TaxonomyList // legacy, should be identical to Taxonomies
 	Sections Taxonomy
 	Pages *Pages
 	Files []*source.File
-	Recent *Pages // legacy, should be identical to Pages
 	Menus *Menus
 	Hugo *HugoInfo
 	Title string
@@ -133,6 +131,24 @@ type SiteInfo struct {
 // linkedin
 type SiteSocial map[string]string
 
+// BaseUrl is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) BaseUrl() template.URL {
+	helpers.Deprecated("Site", ".BaseUrl", ".BaseURL")
+	return s.BaseURL
+}
+
+// Recent is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Recent() *Pages {
+	helpers.Deprecated("Site", ".Recent", ".Pages")
+	return s.Pages
+}
+
+// Indexes is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Indexes() *TaxonomyList {
+	helpers.Deprecated("Site", ".Indexes", ".Taxonomies")
+	return &s.Taxonomies
+}
+
 func (s *SiteInfo) GetParam(key string) interface{} {
 	v := s.Params[strings.ToLower(key)]
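
These accessors keep old templates working because Go's template engine
resolves a name like .Recent to an exported method once the field of
that name is gone, so {{ .Site.Recent }} now calls Recent(), returns
.Pages, and emits the deprecation warning. A self-contained toy (not
Hugo code) demonstrating that mechanism:

    package main

    import (
        "log"
        "os"
        "text/template"
    )

    // site stands in for Hugo's SiteInfo: Pages is the current field,
    // Recent is the deprecated accessor that forwards to it.
    type site struct {
        Pages []string
    }

    func (s site) Recent() []string {
        log.Println("warning: .Recent is deprecated, use .Pages")
        return s.Pages
    }

    func main() {
        data := site{Pages: []string{"post-1", "post-2"}}
        // Both spellings render the same slice; the old one also logs a warning.
        t := template.Must(template.New("demo").Parse("{{ .Recent }} == {{ .Pages }}\n"))
        if err := t.Execute(os.Stdout, data); err != nil {
            log.Fatal(err)
        }
    }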
@@ -445,7 +461,7 @@ func (s *Site) initializeSiteInfo() {
 	}
 
 	s.Info = SiteInfo{
-		BaseUrl: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
+		BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
 		Title: viper.GetString("Title"),
 		Author: viper.GetStringMap("author"),
 		LanguageCode: viper.GetString("languagecode"),
@@ -454,7 +470,6 @@
 		BuildDrafts: viper.GetBool("BuildDrafts"),
 		canonifyURLs: viper.GetBool("CanonifyURLs"),
 		Pages: &s.Pages,
-		Recent: &s.Pages,
 		Menus: &s.Menus,
 		Params: params,
 		Permalinks: permalinks,
@@ -705,14 +720,14 @@ func (s *Site) getMenusFromConfig() Menus {
 				menuEntry.MarshallMap(ime)
 
-				if strings.HasPrefix(menuEntry.Url, "/") {
+				if strings.HasPrefix(menuEntry.URL, "/") {
 					// make it match the nodes
-					menuEntryURL := menuEntry.Url
+					menuEntryURL := menuEntry.URL
 					menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
 					if !s.Info.canonifyURLs {
-						menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
+						menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseURL), menuEntryURL)
 					}
-					menuEntry.Url = menuEntryURL
+					menuEntry.URL = menuEntryURL
 				}
 
 				if ret[name] == nil {
@@ -764,8 +779,8 @@ func (s *Site) assembleMenus() {
 	for p, childmenu := range children {
 		_, ok := flat[twoD{p.MenuName, p.EntryName}]
 		if !ok {
-			// if parent does not exist, create one without a url
-			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""}
+			// if parent does not exist, create one without a URL
+			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
 		}
 		flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
 	}
@@ -818,7 +833,6 @@ func (s *Site) assembleTaxonomies() {
 	}
 
 	s.Info.Taxonomies = s.Taxonomies
-	s.Info.Indexes = &s.Taxonomies
 	s.Info.Sections = s.Sections
 }
@@ -1021,7 +1035,7 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
 	base := t.plural + "/" + t.key
 	n := s.NewNode()
 	n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
-	s.setUrls(n, base)
+	s.setURLs(n, base)
 	if len(t.pages) > 0 {
 		n.Date = t.pages[0].Page.Date
 	}
@@ -1081,7 +1095,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
 		if !viper.GetBool("DisableRSS") {
 			// XML Feed
-			n.Url = s.permalinkStr(base + "/index.xml")
+			n.URL = s.permalinkStr(base + "/index.xml")
 			n.Permalink = s.permalink(base)
 			rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
@@ -1099,7 +1113,7 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
 	for singular, plural := range taxonomies {
 		n := s.NewNode()
 		n.Title = strings.Title(plural)
-		s.setUrls(n, plural)
+		s.setURLs(n, plural)
 		n.Data["Singular"] = singular
 		n.Data["Plural"] = plural
 		n.Data["Terms"] = s.Taxonomies[plural]
@@ -1125,7 +1139,7 @@ func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
 	} else {
 		n.Title = strings.Title(section)
 	}
-	s.setUrls(n, section)
+	s.setURLs(n, section)
 	n.Date = data[0].Page.Date
 	n.Data["Pages"] = data.Pages()
@@ -1175,7 +1189,7 @@ func (s *Site) RenderSectionLists() error {
 		if !viper.GetBool("DisableRSS") && section != "" {
 			// XML Feed
-			n.Url = s.permalinkStr(section + "/index.xml")
+			n.URL = s.permalinkStr(section + "/index.xml")
 			n.Permalink = s.permalink(section)
 			rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
 			if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
@@ -1189,7 +1203,7 @@
 func (s *Site) newHomeNode() *Node {
 	n := s.NewNode()
 	n.Title = n.Site.Title
-	s.setUrls(n, "/")
+	s.setURLs(n, "/")
 	n.Data["Pages"] = s.Pages
 	return n
 }
@@ -1232,7 +1246,7 @@ func (s *Site) RenderHomePage() error {
 	if !viper.GetBool("DisableRSS") {
 		// XML Feed
-		n.Url = s.permalinkStr("index.xml")
+		n.URL = s.permalinkStr("index.xml")
 		n.Title = ""
 		high := 50
 		if len(s.Pages) < high {
@@ -1250,7 +1264,7 @@
 		}
 	}
 
-	n.Url = helpers.URLize("404.html")
+	n.URL = helpers.URLize("404.html")
 	n.Title = "404 Page not found"
 	n.Permalink = s.permalink("404.html")
@@ -1277,7 +1291,7 @@ func (s *Site) RenderSitemap() error {
 	page := &Page{}
 	page.Date = s.Info.LastChange
 	page.Site = &s.Info
-	page.Url = "/"
+	page.URL = "/"
 	pages = append(pages, page)
 	pages = append(pages, s.Pages...)
@@ -1315,9 +1329,9 @@ func (s *Site) Stats() {
 	}
 }
 
-func (s *Site) setUrls(n *Node, in string) {
-	n.Url = helpers.URLizeAndPrep(in)
-	n.Permalink = s.permalink(n.Url)
+func (s *Site) setURLs(n *Node, in string) {
+	n.URL = helpers.URLizeAndPrep(in)
+	n.Permalink = s.permalink(n.URL)
 	n.RSSLink = s.permalink(in + ".xml")
 }