Source file based relative linking

à la GitHub repository Markdown, for both .md files and non-.md files

Signed-off-by: Sven Dowideit <SvenDowideit@home.org.au>
Sven Dowideit
2015-09-09 10:03:38 +10:00
committed by Steve Francia
parent b78f13b041
commit 0f6b334b67
5 changed files with 452 additions and 34 deletions
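
The new `githubLink` resolver added below turns repository-style relative links into page permalinks by stripping the repository prefix, joining relative paths onto the current page's source directory, and then trying the path as-is, with `.md` appended, and with `/index.md` appended. A minimal, self-contained sketch of that intended lookup order, with a flat list of source paths standing in for the site's pages (`resolveRef`, `currentDir`, and `sources` are illustrative names, not part of this commit):

```go
package main

import (
	"fmt"
	"net/url"
	"path/filepath"
	"strings"
)

// resolveRef sketches the lookup order used by the new githubLink resolver:
// strip the repository prefix, join relative refs onto the current source
// directory, then try the path as-is, with ".md" appended, and as "index.md"
// inside the path.
func resolveRef(ref, currentDir string, sources []string) (string, error) {
	refURL, err := url.Parse(strings.TrimPrefix(ref, "/docs"))
	if err != nil {
		return "", err
	}
	if refURL.Scheme != "" {
		return ref, nil // absolute URLs pass through untouched
	}

	refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
	if strings.HasPrefix(refPath, string(filepath.Separator)) {
		refPath = refPath[1:]
	} else {
		refPath = filepath.Join(currentDir, refPath)
	}

	candidates := []string{
		refPath,
		strings.TrimSuffix(refPath, string(filepath.Separator)) + ".md",
		filepath.Join(refPath, "index.md"),
	}
	for _, c := range candidates {
		for _, s := range sources {
			if s == c {
				return s, nil
			}
		}
	}
	return "", fmt.Errorf("no source file found for %q", ref)
}

func main() {
	sources := []string{"project/deploy/index.md", "project/faq.md"}
	fmt.Println(resolveRef("./deploy/", "project", sources)) // project/deploy/index.md
	fmt.Println(resolveRef("faq", "project", sources))       // project/faq.md
}
```

The resolver in the commit goes one step further and returns the matched page's `RelPermalink` or `Permalink` rather than the source path.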


@@ -100,7 +100,7 @@ type SiteInfo struct {
 	Social SiteSocial
 	Sections Taxonomy
 	Pages *Pages
-	Files []*source.File
+	Files *[]*source.File
 	Menus *Menus
 	Hugo *HugoInfo
 	Title string
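
Changing `Files` from a slice to a pointer to the site's slice (mirroring the existing `Pages *Pages` field) lets `SiteInfo` observe source files that are collected after `initializeSiteInfo` wires up `Files: &s.Files` further down in this commit. A small sketch of the aliasing this relies on, using stand-in `site`/`siteInfo`/`file` types rather than Hugo's:

```go
package main

import "fmt"

type file struct{ path string }

type site struct {
	Files []*file
}

type siteInfo struct {
	Files *[]*file // pointer to the owning slice, as in the changed field
}

func main() {
	s := &site{}
	info := siteInfo{Files: &s.Files} // wired once, early

	// Files discovered later are still visible through the pointer.
	s.Files = append(s.Files, &file{path: "images/logo.png"})
	fmt.Println(len(*info.Files)) // 1
}
```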
@@ -217,6 +217,166 @@ func (s *SiteInfo) RelRef(ref string, page *Page) (string, error) {
 	return s.refLink(ref, page, true)
 }
+
+func (s *SiteInfo) GitHub(ref string, page *Page) (string, error) {
+	return s.githubLink(ref, page, true)
+}
+
+func (s *SiteInfo) githubLink(ref string, currentPage *Page, relative bool) (string, error) {
+	var refURL *url.URL
+	var err error
+
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+	refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		//return "", fmt.Errorf("Not a plain filepath link (%s)", ref)
+		// Treat this as not an error, as the link is used as-is
+		return ref, nil
+	}
+
+	var target *Page
+	var link string
+
+	if refURL.Path != "" {
+		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+			refPath = refPath[1:]
+		} else {
+			if currentPage != nil {
+				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+			}
+		}
+
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == refPath {
+				target = page
+				break
+			}
+		}
+		// need to exhaust the test, then try with the others :/
+		// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
+		mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == mdPath {
+				target = page
+				break
+			}
+		}
+		indexPath := filepath.Join(refPath, "index.md")
+		for _, page := range []*Page(*s.Pages) {
+			if page.Source.Path() == indexPath {
+				target = page
+				break
+			}
+		}
+
+		if target == nil {
+			return "", fmt.Errorf("No page found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+		}
+
+		// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+		if relative {
+			link, err = target.RelPermalink()
+		} else {
+			link, err = target.Permalink()
+		}
+
+		if err != nil {
+			return "", err
+		}
+	}
+
+	// SVEN: add tests for github style relative fragments
+	if refURL.Fragment != "" {
+		link = link + "#" + refURL.Fragment
+
+		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + target.UniqueID()
+		} else if currentPage != nil && !currentPage.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + currentPage.UniqueID()
+		}
+	}
+
+	return link, nil
+}
+
+func (s *SiteInfo) GitHubFileLink(ref string, page *Page) (string, error) {
+	return s.githubFileLink(ref, page, false)
+}
+
+// for non-pages in the site tree
+func (s *SiteInfo) githubFileLink(ref string, currentPage *Page, relative bool) (string, error) {
+	var refURL *url.URL
+	var err error
+
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+	refURL, err = url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		//return "", fmt.Errorf("Not a plain filepath link (%s)", ref)
+		// Treat this as not an error, as the link is used as-is
+		return ref, nil
+	}
+
+	var target *source.File
+	var link string
+
+	if refURL.Path != "" {
+		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails to me.
+			refPath = refPath[1:]
+		} else {
+			if currentPage != nil {
+				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+			}
+		}
+
+		for _, file := range []*source.File(*s.Files) {
+			if file.Path() == refPath {
+				target = file
+				break
+			}
+		}
+
+		if target == nil {
+			return "", fmt.Errorf("No file found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+		}
+
+		link = target.Path()
+		// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+		// SVEN: reconsider the fact I hardcoded the `relative` bool in both github resolvers
+		if relative {
+			return "./" + filepath.ToSlash(link), nil
+		} else {
+			return "/" + filepath.ToSlash(link), nil
+		}
+
+		if err != nil {
+			return "", err
+		}
+		return link, nil
+	}
+
+	return "", fmt.Errorf("failed to find a file to match \"%s\" on page \"%s\"", ref, currentPage.Source.Path())
+}
 
 func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
 	atomic.AddUint64(&s.paginationPageCount, cnt)
 }
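
The fragment handling at the end of `githubLink` appends the URL fragment and, unless the rendering config asks for plain ID anchors, suffixes the target page's `UniqueID`, matching how Hugo decorates header and footnote IDs when `PlainIDAnchors` is off. A stand-alone sketch of just that rule (the function name and the example ID are made up for illustration):

```go
package main

import "fmt"

// decorateFragment mirrors the anchor logic in githubLink above: append the
// URL fragment, then suffix the page's UniqueID unless plain ID anchors are
// requested.
func decorateFragment(link, fragment, uniqueID string, plainIDAnchors bool) string {
	if fragment == "" {
		return link
	}
	link += "#" + fragment
	if !plainIDAnchors {
		link += ":" + uniqueID
	}
	return link
}

func main() {
	fmt.Println(decorateFragment("/docs/reference/", "usage", "f96a25cd", false))
	// /docs/reference/#usage:f96a25cd
	fmt.Println(decorateFragment("/docs/reference/", "usage", "f96a25cd", true))
	// /docs/reference/#usage
}
```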
@@ -479,6 +639,7 @@ func (s *Site) initializeSiteInfo() {
 		canonifyURLs: viper.GetBool("CanonifyURLs"),
 		preserveTaxonomyNames: viper.GetBool("PreserveTaxonomyNames"),
 		Pages: &s.Pages,
+		Files: &s.Files,
 		Menus: &s.Menus,
 		Params: params,
 		Permalinks: permalinks,
@@ -1395,6 +1556,7 @@ func (s *Site) Stats() {
 	jww.FEEDBACK.Println(s.draftStats())
 	jww.FEEDBACK.Println(s.futureStats())
 	jww.FEEDBACK.Printf("%d pages created\n", len(s.Pages))
+	jww.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files))
 	jww.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount)
 	taxonomies := viper.GetStringMapString("Taxonomies")