Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/gohugoio/hugo.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2019-01-02 14:33:26 +0300
committerBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2019-03-23 20:51:22 +0300
commit597e418cb02883418f2cebb41400e8e61413f651 (patch)
tree177ad9c540b2583b6dab138c9f0490d28989c7f7 /resources/page
parent44f5c1c14cb1f42cc5f01739c289e9cfc83602af (diff)
Make Page an interface
The main motivation of this commit is to add a `page.Page` interface to replace the very file-oriented `hugolib.Page` struct. This is all a preparation step for issue #5074, "pages from other data sources". But this also fixes a set of annoying limitations, especially related to custom output formats, and shortcodes. Most notable changes: * The inner content of shortcodes using the `{{%` as the outer-most delimiter will now be sent to the content renderer, e.g. Blackfriday. This means that any markdown will partake in the global ToC and footnote context etc. * The Custom Output formats are now "fully virtualized". This removes many of the current limitations. * The taxonomy list type now has a reference to the `Page` object. This improves the taxonomy template `.Title` situation and make common template constructs much simpler. See #5074 Fixes #5763 Fixes #5758 Fixes #5090 Fixes #5204 Fixes #4695 Fixes #5607 Fixes #5707 Fixes #5719 Fixes #3113 Fixes #5706 Fixes #5767 Fixes #5723 Fixes #5769 Fixes #5770 Fixes #5771 Fixes #5759 Fixes #5776 Fixes #5777 Fixes #5778
Diffstat (limited to 'resources/page')
-rw-r--r--resources/page/page.go365
-rw-r--r--resources/page/page_author.go45
-rw-r--r--resources/page/page_data.go42
-rw-r--r--resources/page/page_data_test.go57
-rw-r--r--resources/page/page_generate/.gitignore1
-rw-r--r--resources/page/page_generate/generate_page_wrappers.go212
-rw-r--r--resources/page/page_kinds.go25
-rw-r--r--resources/page/page_kinds_test.go31
-rw-r--r--resources/page/page_marshaljson.autogen.go198
-rw-r--r--resources/page/page_nop.go463
-rw-r--r--resources/page/page_outputformat.go85
-rw-r--r--resources/page/page_paths.go334
-rw-r--r--resources/page/page_paths_test.go258
-rw-r--r--resources/page/page_wrappers.autogen.go97
-rw-r--r--resources/page/pagegroup.go369
-rw-r--r--resources/page/pagegroup_test.go409
-rw-r--r--resources/page/pagemeta/page_frontmatter.go427
-rw-r--r--resources/page/pagemeta/page_frontmatter_test.go262
-rw-r--r--resources/page/pagemeta/pagemeta.go21
-rw-r--r--resources/page/pages.go115
-rw-r--r--resources/page/pages_cache.go136
-rw-r--r--resources/page/pages_cache_test.go86
-rw-r--r--resources/page/pages_language_merge.go64
-rw-r--r--resources/page/pages_prev_next.go42
-rw-r--r--resources/page/pages_prev_next_test.go83
-rw-r--r--resources/page/pages_related.go199
-rw-r--r--resources/page/pages_related_test.go86
-rw-r--r--resources/page/pages_sort.go348
-rw-r--r--resources/page/pages_sort_test.go279
-rw-r--r--resources/page/pagination.go404
-rw-r--r--resources/page/pagination_test.go307
-rw-r--r--resources/page/permalinks.go248
-rw-r--r--resources/page/permalinks_test.go180
-rw-r--r--resources/page/site.go53
-rw-r--r--resources/page/testhelpers_test.go554
-rw-r--r--resources/page/weighted.go140
36 files changed, 7025 insertions, 0 deletions
diff --git a/resources/page/page.go b/resources/page/page.go
new file mode 100644
index 000000000..efbefb456
--- /dev/null
+++ b/resources/page/page.go
@@ -0,0 +1,365 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "html/template"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/compare"
+
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/related"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/source"
+)
+
+// Clear clears any global package state.
+func Clear() error {
+ spc.clear()
+ return nil
+}
+
+// AlternativeOutputFormatsProvider provides alternative output formats for a
+// Page.
+type AlternativeOutputFormatsProvider interface {
+ // AlternativeOutputFormats gives the alternative output formats for the
+ // current output.
+ // Note that we use the term "alternative" and not "alternate" here, as it
+ // does not necessarily replace the other format, it is an alternative representation.
+ AlternativeOutputFormats() OutputFormats
+}
+
+// AuthorProvider provides author information.
+type AuthorProvider interface {
+ Author() Author
+ Authors() AuthorList
+}
+
+// ChildCareProvider provides accessors to child resources.
+type ChildCareProvider interface {
+ Pages() Pages
+ Resources() resource.Resources
+}
+
+// ContentProvider provides the content related values for a Page.
+type ContentProvider interface {
+ Content() (interface{}, error)
+ Plain() string
+ PlainWords() []string
+ Summary() template.HTML
+ Truncated() bool
+ FuzzyWordCount() int
+ WordCount() int
+ ReadingTime() int
+ Len() int
+}
+
+// FileProvider provides the source file.
+type FileProvider interface {
+ File() source.File
+}
+
+// GetPageProvider provides the GetPage method.
+type GetPageProvider interface {
+ // GetPage looks up a page for the given ref.
+ // {{ with .GetPage "blog" }}{{ .Title }}{{ end }}
+ //
+ // This will return nil when no page could be found, and will return
+ // an error if the ref is ambiguous.
+ GetPage(ref string) (Page, error)
+}
+
+// GitInfoProvider provides Git info.
+type GitInfoProvider interface {
+ GitInfo() *gitmap.GitInfo
+}
+
+// InSectionPositioner provides section navigation.
+type InSectionPositioner interface {
+ NextInSection() Page
+ PrevInSection() Page
+}
+
+// InternalDependencies is considered an internal interface.
+type InternalDependencies interface {
+ GetRelatedDocsHandler() *RelatedDocsHandler
+}
+
+// OutputFormatsProvider provides the OutputFormats of a Page.
+type OutputFormatsProvider interface {
+ OutputFormats() OutputFormats
+}
+
+// Page is the core interface in Hugo.
+type Page interface {
+ ContentProvider
+ TableOfContentsProvider
+ PageWithoutContent
+}
+
+// PageMetaProvider provides page metadata, typically provided via front matter.
+type PageMetaProvider interface {
+ // The 4 page dates
+ resource.Dated
+
+ // Aliases forms the base for redirects generation.
+ Aliases() []string
+
+ // BundleType returns the bundle type: "leaf", "branch" or an empty string if it is none.
+ // See https://gohugo.io/content-management/page-bundles/
+ BundleType() string
+
+ // A configured description.
+ Description() string
+
+ // Whether this is a draft. Will only be true if run with the --buildDrafts (-D) flag.
+ Draft() bool
+
+ // IsHome returns whether this is the home page.
+ IsHome() bool
+
+ // Configured keywords.
+ Keywords() []string
+
+ // The Page Kind. One of page, home, section, taxonomy, taxonomyTerm.
+ Kind() string
+
+ // The configured layout to use to render this page. Typically set in front matter.
+ Layout() string
+
+ // The title used for links.
+ LinkTitle() string
+
+ // IsNode returns whether this is an item of one of the list types in Hugo,
+ // i.e. not a regular content
+ IsNode() bool
+
+ // IsPage returns whether this is a regular content
+ IsPage() bool
+
+ // Param looks for a param in Page and then in Site config.
+ Param(key interface{}) (interface{}, error)
+
+ // Path gets the relative path, including file name and extension if relevant,
+ // to the source of this Page. It will be relative to any content root.
+ Path() string
+
+ // The slug, typically defined in front matter.
+ Slug() string
+
+ // This page's language code. Will be the same as the site's.
+ Lang() string
+
+ // IsSection returns whether this is a section
+ IsSection() bool
+
+ // Section returns the first path element below the content root.
+ Section() string
+
+ // Returns a slice of sections (directories if it's a file) to this
+ // Page.
+ SectionsEntries() []string
+
+ // SectionsPath is SectionsEntries joined with a /.
+ SectionsPath() string
+
+ // Sitemap returns the sitemap configuration for this page.
+ Sitemap() config.Sitemap
+
+ // Type is a discriminator used to select layouts etc. It is typically set
+ // in front matter, but will fall back to the root section.
+ Type() string
+
+ // The configured weight, used as the first sort value in the default
+ // page sort if non-zero.
+ Weight() int
+}
+
+// PageRenderProvider provides a way for a Page to render itself.
+type PageRenderProvider interface {
+ Render(layout ...string) template.HTML
+}
+
+// PageWithoutContent is the Page without any of the content methods.
+type PageWithoutContent interface {
+ RawContentProvider
+ resource.Resource
+ PageMetaProvider
+ resource.LanguageProvider
+
+ // For pages backed by a file.
+ FileProvider
+
+ // Output formats
+ OutputFormatsProvider
+ AlternativeOutputFormatsProvider
+
+ // Tree navigation
+ ChildCareProvider
+ TreeProvider
+
+ // Horisontal navigation
+ InSectionPositioner
+ PageRenderProvider
+ PaginatorProvider
+ Positioner
+ navigation.PageMenusProvider
+
+ // TODO(bep)
+ AuthorProvider
+
+ // Page lookups/refs
+ GetPageProvider
+ RefProvider
+
+ resource.TranslationKeyProvider
+ TranslationsProvider
+
+ SitesProvider
+
+ // Helper methods
+ ShortcodeInfoProvider
+ compare.Eqer
+ maps.Scratcher
+ RelatedKeywordsProvider
+
+ DeprecatedWarningPageMethods
+}
+
+// Positioner provides next/prev navigation.
+type Positioner interface {
+ Next() Page
+ Prev() Page
+
+ // Deprecated: Use Prev. Will be removed in Hugo 0.57
+ PrevPage() Page
+
+ // Deprecated: Use Next. Will be removed in Hugo 0.57
+ NextPage() Page
+}
+
+// RawContentProvider provides the raw, unprocessed content of the page.
+type RawContentProvider interface {
+ RawContent() string
+}
+
+// RefProvider provides the methods needed to create reflinks to pages.
+type RefProvider interface {
+ Ref(argsm map[string]interface{}) (string, error)
+ RefFrom(argsm map[string]interface{}, source interface{}) (string, error)
+ RelRef(argsm map[string]interface{}) (string, error)
+ RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error)
+}
+
+// RelatedKeywordsProvider allows a Page to be indexed.
+type RelatedKeywordsProvider interface {
+ // Make it indexable as a related.Document
+ RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error)
+}
+
+// ShortcodeInfoProvider provides info about the shortcodes in a Page.
+type ShortcodeInfoProvider interface {
+ // HasShortcode return whether the page has a shortcode with the given name.
+ // This method is mainly motivated with the Hugo Docs site's need for a list
+ // of pages with the `todo` shortcode in it.
+ HasShortcode(name string) bool
+}
+
+// SitesProvider provide accessors to get sites.
+type SitesProvider interface {
+ Site() Site
+ Sites() Sites
+}
+
+// TableOfContentsProvider provides the table of contents for a Page.
+type TableOfContentsProvider interface {
+ TableOfContents() template.HTML
+}
+
+// TranslationsProvider provides access to any translations.
+type TranslationsProvider interface {
+
+ // IsTranslated returns whether this content file is translated to
+ // other language(s).
+ IsTranslated() bool
+
+ // AllTranslations returns all translations, including the current Page.
+ AllTranslations() Pages
+
+ // Translations returns the translations excluding the current Page.
+ Translations() Pages
+}
+
+// TreeProvider provides section tree navigation.
+type TreeProvider interface {
+
+ // IsAncestor returns whether the current page is an ancestor of the given
+ // Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+ IsAncestor(other interface{}) (bool, error)
+
+ // CurrentSection returns the page's current section or the page itself if home or a section.
+ // Note that this will return nil for pages that is not regular, home or section pages.
+ CurrentSection() Page
+
+ // IsDescendant returns whether the current page is a descendant of the given
+ // Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+ IsDescendant(other interface{}) (bool, error)
+
+ // FirstSection returns the section on level 1 below home, e.g. "/docs".
+ // For the home page, this will return itself.
+ FirstSection() Page
+
+ // InSection returns whether the given page is in the current section.
+ // Note that this will always return false for pages that are
+ // not either regular, home or section pages.
+ InSection(other interface{}) (bool, error)
+
+ // Parent returns a section's parent section or a page's section.
+ // To get a section's subsections, see Page's Sections method.
+ Parent() Page
+
+ // Sections returns this section's subsections, if any.
+ // Note that for non-sections, this method will always return an empty list.
+ Sections() Pages
+}
+
+// DeprecatedWarningPageMethods lists deprecated Page methods that will trigger
+// a WARNING if invoked.
+// This was added in Hugo 0.55.
+type DeprecatedWarningPageMethods interface {
+ source.FileWithoutOverlap
+ DeprecatedWarningPageMethods1
+}
+
+type DeprecatedWarningPageMethods1 interface {
+ IsDraft() bool
+ Hugo() hugo.Info
+ LanguagePrefix() string
+ GetParam(key string) interface{}
+ RSSLink() template.URL
+ URL() string
+}
+
+// Move here to trigger ERROR instead of WARNING.
+// TODO(bep) create wrappers and put into the Page once it has some methods.
+type DeprecatedErrorPageMethods interface {
+}
diff --git a/resources/page/page_author.go b/resources/page/page_author.go
new file mode 100644
index 000000000..9e8a95182
--- /dev/null
+++ b/resources/page/page_author.go
@@ -0,0 +1,45 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+// AuthorList is a list of all authors and their metadata.
+type AuthorList map[string]Author
+
+// Author contains details about the author of a page.
+type Author struct {
+ GivenName string
+ FamilyName string
+ DisplayName string
+ Thumbnail string
+ Image string
+ ShortBio string
+ LongBio string
+ Email string
+ Social AuthorSocial
+}
+
+// AuthorSocial is a place to put social details per author. These are the
+// standard keys that themes will expect to have available, but can be
+// expanded to any others on a per site basis
+// - website
+// - github
+// - facebook
+// - twitter
+// - googleplus
+// - pinterest
+// - instagram
+// - youtube
+// - linkedin
+// - skype
+type AuthorSocial map[string]string
diff --git a/resources/page/page_data.go b/resources/page/page_data.go
new file mode 100644
index 000000000..3345a44da
--- /dev/null
+++ b/resources/page/page_data.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "fmt"
+)
+
+// Data represents the .Data element in a Page in Hugo. We make this
+// a type so we can do lazy loading of .Data.Pages
+type Data map[string]interface{}
+
+// Pages returns the pages stored with key "pages". If this is a func,
+// it will be invoked.
+func (d Data) Pages() Pages {
+ v, found := d["pages"]
+ if !found {
+ return nil
+ }
+
+ switch vv := v.(type) {
+ case Pages:
+ return vv
+ case func() Pages:
+ return vv()
+ default:
+ panic(fmt.Sprintf("%T is not Pages", v))
+ }
+}
diff --git a/resources/page/page_data_test.go b/resources/page/page_data_test.go
new file mode 100644
index 000000000..b6641bcd7
--- /dev/null
+++ b/resources/page/page_data_test.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "bytes"
+ "testing"
+
+ "text/template"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestPageData(t *testing.T) {
+ assert := require.New(t)
+
+ data := make(Data)
+
+ assert.Nil(data.Pages())
+
+ pages := Pages{
+ &testPage{title: "a1"},
+ &testPage{title: "a2"},
+ }
+
+ data["pages"] = pages
+
+ assert.Equal(pages, data.Pages())
+
+ data["pages"] = func() Pages {
+ return pages
+ }
+
+ assert.Equal(pages, data.Pages())
+
+ templ, err := template.New("").Parse(`Pages: {{ .Pages }}`)
+
+ assert.NoError(err)
+
+ var buff bytes.Buffer
+
+ assert.NoError(templ.Execute(&buff, data))
+
+ assert.Contains(buff.String(), "Pages(2)")
+
+}
diff --git a/resources/page/page_generate/.gitignore b/resources/page/page_generate/.gitignore
new file mode 100644
index 000000000..84fd70a9f
--- /dev/null
+++ b/resources/page/page_generate/.gitignore
@@ -0,0 +1 @@
+generate \ No newline at end of file
diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go
new file mode 100644
index 000000000..af85cb429
--- /dev/null
+++ b/resources/page/page_generate/generate_page_wrappers.go
@@ -0,0 +1,212 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page_generate
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "reflect"
+
+ "github.com/pkg/errors"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/codegen"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/source"
+)
+
+const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+`
+
+var (
+ fileInterfaceDeprecated = reflect.TypeOf((*source.FileWithoutOverlap)(nil)).Elem()
+ pageInterfaceDeprecated = reflect.TypeOf((*page.DeprecatedWarningPageMethods)(nil)).Elem()
+ pageInterface = reflect.TypeOf((*page.Page)(nil)).Elem()
+
+ packageDir = filepath.FromSlash("resources/page")
+)
+
+func Generate(c *codegen.Inspector) error {
+ if err := generateMarshalJSON(c); err != nil {
+ return errors.Wrap(err, "failed to generate JSON marshaler")
+
+ }
+
+ if err := generateDeprecatedWrappers(c); err != nil {
+ return errors.Wrap(err, "failed to generate deprecate wrappers")
+ }
+
+ return nil
+}
+
+func generateMarshalJSON(c *codegen.Inspector) error {
+ filename := filepath.Join(c.ProjectRootDir, packageDir, "page_marshaljson.autogen.go")
+ f, err := os.Create(filename)
+
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ includes := []reflect.Type{pageInterface}
+
+ // Exclude these methods
+ excludes := []reflect.Type{
+ // We need to eveluate the deprecated vs JSON in the future,
+ // but leave them out for now.
+ pageInterfaceDeprecated,
+
+ // Leave this out for now. We need to revisit the author issue.
+ reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(),
+
+ // navigation.PageMenus
+
+ // Prevent loops.
+ reflect.TypeOf((*page.SitesProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.Positioner)(nil)).Elem(),
+
+ reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.TreeProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(),
+ reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(),
+ reflect.TypeOf((*maps.Scratcher)(nil)).Elem(),
+ }
+
+ methods := c.MethodsFromTypes(
+ includes,
+ excludes)
+
+ if len(methods) == 0 {
+ return errors.New("no methods found")
+ }
+
+ marshalJSON, pkgImports := methods.ToMarshalJSON("Page", "github.com/gohugoio/hugo/resources/page")
+
+ fmt.Fprintf(f, `%s
+
+package page
+
+%s
+
+
+%s
+
+
+`, header, importsString(pkgImports), marshalJSON)
+
+ return nil
+}
+
+func generateDeprecatedWrappers(c *codegen.Inspector) error {
+ filename := filepath.Join(c.ProjectRootDir, packageDir, "page_wrappers.autogen.go")
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Generate a wrapper for deprecated page methods
+
+ reasons := map[string]string{
+ "IsDraft": "Use .Draft.",
+ "Hugo": "Use the global hugo function.",
+ "LanguagePrefix": "Use .Site.LanguagePrefix.",
+ "GetParam": "Use .Param or .Params.myParam.",
+ "RSSLink": `Use the Output Format's link, e.g. something like:
+ {{ with .OutputFormats.Get "RSS" }}{{ . RelPermalink }}{{ end }}`,
+ "URL": "Use .Permalink or .RelPermalink. If what you want is the front matter URL value, use .Params.url",
+ }
+
+ deprecated := func(name string, tp reflect.Type) string {
+ var alternative string
+ if tp == fileInterfaceDeprecated {
+ alternative = "Use .File." + name
+ } else {
+ var found bool
+ alternative, found = reasons[name]
+ if !found {
+ panic(fmt.Sprintf("no deprecated reason found for %q", name))
+ }
+ }
+
+ return fmt.Sprintf("helpers.Deprecated(%q, %q, %q, false)", "Page", "."+name, alternative)
+ }
+
+ var buff bytes.Buffer
+
+ methods := c.MethodsFromTypes([]reflect.Type{fileInterfaceDeprecated, pageInterfaceDeprecated}, nil)
+
+ for _, m := range methods {
+ fmt.Fprint(&buff, m.Declaration("*pageDeprecated"))
+ fmt.Fprintln(&buff, " {")
+ fmt.Fprintf(&buff, "\t%s\n", deprecated(m.Name, m.Owner))
+ fmt.Fprintf(&buff, "\t%s\n}\n", m.Delegate("p", "p"))
+
+ }
+
+ pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/helpers")
+
+ fmt.Fprintf(f, `%s
+
+package page
+
+%s
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+ return &pageDeprecated{p: p}
+}
+
+type pageDeprecated struct {
+ p DeprecatedWarningPageMethods
+}
+
+%s
+
+`, header, importsString(pkgImports), buff.String())
+
+ return nil
+}
+
+func importsString(imps []string) string {
+ if len(imps) == 0 {
+ return ""
+ }
+
+ if len(imps) == 1 {
+ return fmt.Sprintf("import %q", imps[0])
+ }
+
+ impsStr := "import (\n"
+ for _, imp := range imps {
+ impsStr += fmt.Sprintf("%q\n", imp)
+ }
+
+ return impsStr + ")"
+}
diff --git a/resources/page/page_kinds.go b/resources/page/page_kinds.go
new file mode 100644
index 000000000..a2e59438e
--- /dev/null
+++ b/resources/page/page_kinds.go
@@ -0,0 +1,25 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+const (
+ KindPage = "page"
+
+ // The rest are node types; home page, sections etc.
+
+ KindHome = "home"
+ KindSection = "section"
+ KindTaxonomy = "taxonomy"
+ KindTaxonomyTerm = "taxonomyTerm"
+)
diff --git a/resources/page/page_kinds_test.go b/resources/page/page_kinds_test.go
new file mode 100644
index 000000000..8ad7343dc
--- /dev/null
+++ b/resources/page/page_kinds_test.go
@@ -0,0 +1,31 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestKind(t *testing.T) {
+ t.Parallel()
+ // Add tests for these constants to make sure they don't change
+ require.Equal(t, "page", KindPage)
+ require.Equal(t, "home", KindHome)
+ require.Equal(t, "section", KindSection)
+ require.Equal(t, "taxonomy", KindTaxonomy)
+ require.Equal(t, "taxonomyTerm", KindTaxonomyTerm)
+
+}
diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go
new file mode 100644
index 000000000..5f4c9d32f
--- /dev/null
+++ b/resources/page/page_marshaljson.autogen.go
@@ -0,0 +1,198 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+ "encoding/json"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/source"
+ "html/template"
+ "time"
+)
+
+func MarshalPageToJSON(p Page) ([]byte, error) {
+ content, err := p.Content()
+ if err != nil {
+ return nil, err
+ }
+ plain := p.Plain()
+ plainWords := p.PlainWords()
+ summary := p.Summary()
+ truncated := p.Truncated()
+ fuzzyWordCount := p.FuzzyWordCount()
+ wordCount := p.WordCount()
+ readingTime := p.ReadingTime()
+ length := p.Len()
+ tableOfContents := p.TableOfContents()
+ rawContent := p.RawContent()
+ mediaType := p.MediaType()
+ resourceType := p.ResourceType()
+ permalink := p.Permalink()
+ relPermalink := p.RelPermalink()
+ name := p.Name()
+ title := p.Title()
+ params := p.Params()
+ data := p.Data()
+ date := p.Date()
+ lastmod := p.Lastmod()
+ publishDate := p.PublishDate()
+ expiryDate := p.ExpiryDate()
+ aliases := p.Aliases()
+ bundleType := p.BundleType()
+ description := p.Description()
+ draft := p.Draft()
+ isHome := p.IsHome()
+ keywords := p.Keywords()
+ kind := p.Kind()
+ layout := p.Layout()
+ linkTitle := p.LinkTitle()
+ isNode := p.IsNode()
+ isPage := p.IsPage()
+ path := p.Path()
+ slug := p.Slug()
+ lang := p.Lang()
+ isSection := p.IsSection()
+ section := p.Section()
+ sectionsEntries := p.SectionsEntries()
+ sectionsPath := p.SectionsPath()
+ sitemap := p.Sitemap()
+ typ := p.Type()
+ weight := p.Weight()
+ language := p.Language()
+ file := p.File()
+ outputFormats := p.OutputFormats()
+ alternativeOutputFormats := p.AlternativeOutputFormats()
+ menus := p.Menus()
+ translationKey := p.TranslationKey()
+ isTranslated := p.IsTranslated()
+ allTranslations := p.AllTranslations()
+ translations := p.Translations()
+
+ s := struct {
+ Content interface{}
+ Plain string
+ PlainWords []string
+ Summary template.HTML
+ Truncated bool
+ FuzzyWordCount int
+ WordCount int
+ ReadingTime int
+ Len int
+ TableOfContents template.HTML
+ RawContent string
+ MediaType media.Type
+ ResourceType string
+ Permalink string
+ RelPermalink string
+ Name string
+ Title string
+ Params map[string]interface{}
+ Data interface{}
+ Date time.Time
+ Lastmod time.Time
+ PublishDate time.Time
+ ExpiryDate time.Time
+ Aliases []string
+ BundleType string
+ Description string
+ Draft bool
+ IsHome bool
+ Keywords []string
+ Kind string
+ Layout string
+ LinkTitle string
+ IsNode bool
+ IsPage bool
+ Path string
+ Slug string
+ Lang string
+ IsSection bool
+ Section string
+ SectionsEntries []string
+ SectionsPath string
+ Sitemap config.Sitemap
+ Type string
+ Weight int
+ Language *langs.Language
+ File source.File
+ OutputFormats OutputFormats
+ AlternativeOutputFormats OutputFormats
+ Menus navigation.PageMenus
+ TranslationKey string
+ IsTranslated bool
+ AllTranslations Pages
+ Translations Pages
+ }{
+ Content: content,
+ Plain: plain,
+ PlainWords: plainWords,
+ Summary: summary,
+ Truncated: truncated,
+ FuzzyWordCount: fuzzyWordCount,
+ WordCount: wordCount,
+ ReadingTime: readingTime,
+ Len: length,
+ TableOfContents: tableOfContents,
+ RawContent: rawContent,
+ MediaType: mediaType,
+ ResourceType: resourceType,
+ Permalink: permalink,
+ RelPermalink: relPermalink,
+ Name: name,
+ Title: title,
+ Params: params,
+ Data: data,
+ Date: date,
+ Lastmod: lastmod,
+ PublishDate: publishDate,
+ ExpiryDate: expiryDate,
+ Aliases: aliases,
+ BundleType: bundleType,
+ Description: description,
+ Draft: draft,
+ IsHome: isHome,
+ Keywords: keywords,
+ Kind: kind,
+ Layout: layout,
+ LinkTitle: linkTitle,
+ IsNode: isNode,
+ IsPage: isPage,
+ Path: path,
+ Slug: slug,
+ Lang: lang,
+ IsSection: isSection,
+ Section: section,
+ SectionsEntries: sectionsEntries,
+ SectionsPath: sectionsPath,
+ Sitemap: sitemap,
+ Type: typ,
+ Weight: weight,
+ Language: language,
+ File: file,
+ OutputFormats: outputFormats,
+ AlternativeOutputFormats: alternativeOutputFormats,
+ Menus: menus,
+ TranslationKey: translationKey,
+ IsTranslated: isTranslated,
+ AllTranslations: allTranslations,
+ Translations: translations,
+ }
+
+ return json.Marshal(&s)
+}
diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go
new file mode 100644
index 000000000..7afbee216
--- /dev/null
+++ b/resources/page/page_nop.go
@@ -0,0 +1,463 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "html/template"
+ "os"
+ "time"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/navigation"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/related"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ NopPage Page = new(nopPage)
+ NilPage *nopPage
+)
+
+// nopPage implements Page, but does nothing.
+type nopPage int
+
+func (p *nopPage) Aliases() []string {
+ return nil
+}
+
+func (p *nopPage) Sitemap() config.Sitemap {
+ return config.Sitemap{}
+}
+
+func (p *nopPage) Layout() string {
+ return ""
+}
+
+func (p *nopPage) RSSLink() template.URL {
+ return ""
+}
+
+func (p *nopPage) Author() Author {
+ return Author{}
+
+}
+func (p *nopPage) Authors() AuthorList {
+ return nil
+}
+
+func (p *nopPage) AllTranslations() Pages {
+ return nil
+}
+
+func (p *nopPage) LanguagePrefix() string {
+ return ""
+}
+
+func (p *nopPage) AlternativeOutputFormats() OutputFormats {
+ return nil
+}
+
+func (p *nopPage) BaseFileName() string {
+ return ""
+}
+
+func (p *nopPage) BundleType() string {
+ return ""
+}
+
+func (p *nopPage) Content() (interface{}, error) {
+ return "", nil
+}
+
+func (p *nopPage) ContentBaseName() string {
+ return ""
+}
+
+func (p *nopPage) CurrentSection() Page {
+ return nil
+}
+
+func (p *nopPage) Data() interface{} {
+ return nil
+}
+
+func (p *nopPage) Date() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Description() string {
+ return ""
+}
+
+func (p *nopPage) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+ return "", nil
+}
+func (p *nopPage) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) Dir() string {
+ return ""
+}
+
+func (p *nopPage) Draft() bool {
+ return false
+}
+
+func (p *nopPage) Eq(other interface{}) bool {
+ return p == other
+}
+
+func (p *nopPage) ExpiryDate() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Ext() string {
+ return ""
+}
+
+func (p *nopPage) Extension() string {
+ return ""
+}
+
+var nilFile *source.FileInfo
+
+func (p *nopPage) File() source.File {
+ return nilFile
+}
+
+func (p *nopPage) FileInfo() os.FileInfo {
+ return nil
+}
+
+func (p *nopPage) Filename() string {
+ return ""
+}
+
+func (p *nopPage) FirstSection() Page {
+ return nil
+}
+
+func (p *nopPage) FuzzyWordCount() int {
+ return 0
+}
+
+func (p *nopPage) GetPage(ref string) (Page, error) {
+ return nil, nil
+}
+
+func (p *nopPage) GetParam(key string) interface{} {
+ return nil
+}
+
+func (p *nopPage) GitInfo() *gitmap.GitInfo {
+ return nil
+}
+
+func (p *nopPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+ return false
+}
+
+func (p *nopPage) HasShortcode(name string) bool {
+ return false
+}
+
+func (p *nopPage) Hugo() (h hugo.Info) {
+ return
+}
+
+func (p *nopPage) InSection(other interface{}) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsAncestor(other interface{}) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsDescendant(other interface{}) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsDraft() bool {
+ return false
+}
+
+func (p *nopPage) IsHome() bool {
+ return false
+}
+
+func (p *nopPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+ return false
+}
+
+func (p *nopPage) IsNode() bool {
+ return false
+}
+
+func (p *nopPage) IsPage() bool {
+ return false
+}
+
+func (p *nopPage) IsSection() bool {
+ return false
+}
+
+func (p *nopPage) IsTranslated() bool {
+ return false
+}
+
+func (p *nopPage) Keywords() []string {
+ return nil
+}
+
+func (p *nopPage) Kind() string {
+ return ""
+}
+
+func (p *nopPage) Lang() string {
+ return ""
+}
+
+func (p *nopPage) Language() *langs.Language {
+ return nil
+}
+
+func (p *nopPage) Lastmod() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Len() int {
+ return 0
+}
+
+func (p *nopPage) LinkTitle() string {
+ return ""
+}
+
+func (p *nopPage) LogicalName() string {
+ return ""
+}
+
+func (p *nopPage) MediaType() (m media.Type) {
+ return
+}
+
+func (p *nopPage) Menus() (m navigation.PageMenus) {
+ return
+}
+
+func (p *nopPage) Name() string {
+ return ""
+}
+
+func (p *nopPage) Next() Page {
+ return nil
+}
+
+func (p *nopPage) OutputFormats() OutputFormats {
+ return nil
+}
+
+func (p *nopPage) Pages() Pages {
+ return nil
+}
+
+func (p *nopPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Paginator(options ...interface{}) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Param(key interface{}) (interface{}, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Params() map[string]interface{} {
+ return nil
+}
+
+func (p *nopPage) Parent() Page {
+ return nil
+}
+
+func (p *nopPage) Path() string {
+ return ""
+}
+
+func (p *nopPage) Permalink() string {
+ return ""
+}
+
+func (p *nopPage) Plain() string {
+ return ""
+}
+
+func (p *nopPage) PlainWords() []string {
+ return nil
+}
+
+func (p *nopPage) Prev() Page {
+ return nil
+}
+
+func (p *nopPage) PublishDate() (t time.Time) {
+ return
+}
+
+func (p *nopPage) PrevInSection() Page {
+ return nil
+}
+func (p *nopPage) NextInSection() Page {
+ return nil
+}
+
+func (p *nopPage) PrevPage() Page {
+ return nil
+}
+
+func (p *nopPage) NextPage() Page {
+ return nil
+}
+
+func (p *nopPage) RawContent() string {
+ return ""
+}
+
+func (p *nopPage) ReadingTime() int {
+ return 0
+}
+
+func (p *nopPage) Ref(argsm map[string]interface{}) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) RelPermalink() string {
+ return ""
+}
+
+func (p *nopPage) RelRef(argsm map[string]interface{}) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) Render(layout ...string) template.HTML {
+ return ""
+}
+
+func (p *nopPage) ResourceType() string {
+ return ""
+}
+
+func (p *nopPage) Resources() resource.Resources {
+ return nil
+}
+
+func (p *nopPage) Scratch() *maps.Scratch {
+ return nil
+}
+
+func (p *nopPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Section() string {
+ return ""
+}
+
+func (p *nopPage) Sections() Pages {
+ return nil
+}
+
+func (p *nopPage) SectionsEntries() []string {
+ return nil
+}
+
+func (p *nopPage) SectionsPath() string {
+ return ""
+}
+
+func (p *nopPage) Site() Site {
+ return nil
+}
+
+func (p *nopPage) Sites() Sites {
+ return nil
+}
+
+func (p *nopPage) Slug() string {
+ return ""
+}
+
+func (p *nopPage) String() string {
+ return "nopPage"
+}
+
+func (p *nopPage) Summary() template.HTML {
+ return ""
+}
+
+func (p *nopPage) TableOfContents() template.HTML {
+ return ""
+}
+
+func (p *nopPage) Title() string {
+ return ""
+}
+
+func (p *nopPage) TranslationBaseName() string {
+ return ""
+}
+
+func (p *nopPage) TranslationKey() string {
+ return ""
+}
+
+func (p *nopPage) Translations() Pages {
+ return nil
+}
+
+func (p *nopPage) Truncated() bool {
+ return false
+}
+
+func (p *nopPage) Type() string {
+ return ""
+}
+
+func (p *nopPage) URL() string {
+ return ""
+}
+
+func (p *nopPage) UniqueID() string {
+ return ""
+}
+
+func (p *nopPage) Weight() int {
+ return 0
+}
+
+func (p *nopPage) WordCount() int {
+ return 0
+}
diff --git a/resources/page/page_outputformat.go b/resources/page/page_outputformat.go
new file mode 100644
index 000000000..ff4213cc4
--- /dev/null
+++ b/resources/page/page_outputformat.go
@@ -0,0 +1,85 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "strings"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+)
+
+// OutputFormats holds a list of the relevant output formats for a given page.
+type OutputFormats []OutputFormat
+
+// OutputFormat links to a representation of a resource.
+type OutputFormat struct {
+	// Rel contains a value that can be used to construct a rel link.
+	// This value is fetched from the output format definition.
+	// Note that for pages with only one output format,
+	// this will always be "canonical".
+ // As an example, the AMP output format will, by default, return "amphtml".
+ //
+ // See:
+ // https://www.ampproject.org/docs/guides/deploy/discovery
+ //
+ // Most other output formats will have "alternate" as value for this.
+ Rel string
+
+ Format output.Format
+
+ relPermalink string
+ permalink string
+}
+
+// Name returns this OutputFormat's name, i.e. HTML, AMP, JSON etc.
+func (o OutputFormat) Name() string {
+ return o.Format.Name
+}
+
+// MediaType returns this OutputFormat's MediaType (MIME type).
+func (o OutputFormat) MediaType() media.Type {
+ return o.Format.MediaType
+}
+
+// Permalink returns the absolute permalink to this output format.
+func (o OutputFormat) Permalink() string {
+ return o.permalink
+}
+
+// RelPermalink returns the relative permalink to this output format.
+func (o OutputFormat) RelPermalink() string {
+ return o.relPermalink
+}
+
+func NewOutputFormat(relPermalink, permalink string, isCanonical bool, f output.Format) OutputFormat {
+ rel := f.Rel
+ if isCanonical {
+ rel = "canonical"
+ }
+ return OutputFormat{Rel: rel, Format: f, relPermalink: relPermalink, permalink: permalink}
+}
+
+// Get gets an OutputFormat given its name, i.e. json, html etc.
+// It returns nil if none found.
+func (o OutputFormats) Get(name string) *OutputFormat {
+ for _, f := range o {
+ if strings.EqualFold(f.Format.Name, name) {
+ return &f
+ }
+ }
+ return nil
+}
diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go
new file mode 100644
index 000000000..160c225b1
--- /dev/null
+++ b/resources/page/page_paths.go
@@ -0,0 +1,334 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "path"
+ "path/filepath"
+
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/output"
+)
+
+const slash = "/"
+
+// TargetPathDescriptor describes what a file path for a given resource
+// should look like on the file system. The same descriptor is then later used to
+// create both the permalinks and the relative links, paginator URLs etc.
+//
+// The big motivation behind this is to have only one source of truth for URLs,
+// and by that also get rid of most of the fragile string parsing/encoding etc.
+//
+//
+type TargetPathDescriptor struct {
+ PathSpec *helpers.PathSpec
+
+ Type output.Format
+ Kind string
+
+ Sections []string
+
+ // For regular content pages this is either
+ // 1) the Slug, if set,
+ // 2) the file base name (TranslationBaseName).
+ BaseName string
+
+ // Source directory.
+ Dir string
+
+ // Typically a language prefix added to file paths.
+ PrefixFilePath string
+
+ // Typically a language prefix added to links.
+ PrefixLink string
+
+ // If in multihost mode etc., every link/path needs to be prefixed, even
+ // if set in URL.
+ ForcePrefix bool
+
+ // URL from front matter if set. Will override any Slug etc.
+ URL string
+
+ // Used to create paginator links.
+ Addends string
+
+ // The expanded permalink if defined for the section, ready to use.
+ ExpandedPermalink string
+
+ // Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
+ UglyURLs bool
+}
+
+// TODO(bep) move this type.
+type TargetPaths struct {
+
+ // Where to store the file on disk relative to the publish dir. OS slashes.
+ TargetFilename string
+
+ // The directory to write sub-resources of the above.
+ SubResourceBaseTarget string
+
+ // The base for creating links to sub-resources of the above.
+ SubResourceBaseLink string
+
+	// The relative permalink to this resource. Unix slashes.
+ Link string
+}
+
+func (p TargetPaths) RelPermalink(s *helpers.PathSpec) string {
+ return s.PrependBasePath(p.Link, false)
+}
+
+func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Format) string {
+ var baseURL string
+ var err error
+ if f.Protocol != "" {
+ baseURL, err = s.BaseURL.WithProtocol(f.Protocol)
+ if err != nil {
+ return ""
+ }
+ } else {
+ baseURL = s.BaseURL.String()
+ }
+
+ return s.PermalinkForBaseURL(p.Link, baseURL)
+}
+
+func isHtmlIndex(s string) bool {
+ return strings.HasSuffix(s, "/index.html")
+}
+
+func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) {
+
+ if d.Type.Name == "" {
+ panic("CreateTargetPath: missing type")
+ }
+
+ // Normalize all file Windows paths to simplify what's next.
+ if helpers.FilePathSeparator != slash {
+ d.Dir = filepath.ToSlash(d.Dir)
+ d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath)
+
+ }
+
+ pagePath := slash
+
+ var (
+ pagePathDir string
+ link string
+ linkDir string
+ )
+
+ // The top level index files, i.e. the home page etc., needs
+ // the index base even when uglyURLs is enabled.
+ needsBase := true
+
+ isUgly := d.UglyURLs && !d.Type.NoUgly
+ baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName
+
+ if d.ExpandedPermalink == "" && baseNameSameAsType {
+ isUgly = true
+ }
+
+ if d.Kind != KindPage && d.URL == "" && len(d.Sections) > 0 {
+ if d.ExpandedPermalink != "" {
+ pagePath = pjoin(pagePath, d.ExpandedPermalink)
+ } else {
+ pagePath = pjoin(d.Sections...)
+ }
+ needsBase = false
+ }
+
+ if d.Type.Path != "" {
+ pagePath = pjoin(pagePath, d.Type.Path)
+ }
+
+ if d.Kind != KindHome && d.URL != "" {
+ pagePath = pjoin(pagePath, d.URL)
+
+ if d.Addends != "" {
+ pagePath = pjoin(pagePath, d.Addends)
+ }
+
+ pagePathDir = pagePath
+ link = pagePath
+ hasDot := strings.Contains(d.URL, ".")
+ hasSlash := strings.HasSuffix(d.URL, slash)
+
+ if hasSlash || !hasDot {
+ pagePath = pjoin(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
+ } else if hasDot {
+ pagePathDir = path.Dir(pagePathDir)
+ }
+
+ if !isHtmlIndex(pagePath) {
+ link = pagePath
+ } else if !hasSlash {
+ link += slash
+ }
+
+ linkDir = pagePathDir
+
+ if d.ForcePrefix {
+
+ // Prepend language prefix if not already set in URL
+ if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) {
+ pagePath = pjoin(d.PrefixFilePath, pagePath)
+ pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ }
+
+ if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) {
+ link = pjoin(d.PrefixLink, link)
+ linkDir = pjoin(d.PrefixLink, linkDir)
+ }
+ }
+
+ } else if d.Kind == KindPage {
+
+ if d.ExpandedPermalink != "" {
+ pagePath = pjoin(pagePath, d.ExpandedPermalink)
+
+ } else {
+ if d.Dir != "" {
+ pagePath = pjoin(pagePath, d.Dir)
+ }
+ if d.BaseName != "" {
+ pagePath = pjoin(pagePath, d.BaseName)
+ }
+ }
+
+ if d.Addends != "" {
+ pagePath = pjoin(pagePath, d.Addends)
+ }
+
+ link = pagePath
+
+ if baseNameSameAsType {
+ link = strings.TrimSuffix(link, d.BaseName)
+ }
+
+ pagePathDir = link
+ link = link + slash
+ linkDir = pagePathDir
+
+ if isUgly {
+ pagePath = addSuffix(pagePath, d.Type.MediaType.FullSuffix())
+ } else {
+ pagePath = pjoin(pagePath, d.Type.BaseName+d.Type.MediaType.FullSuffix())
+ }
+
+ if isUgly && !isHtmlIndex(pagePath) {
+ link = pagePath
+ }
+
+ if d.PrefixFilePath != "" {
+ pagePath = pjoin(d.PrefixFilePath, pagePath)
+ pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ }
+
+ if d.PrefixLink != "" {
+ link = pjoin(d.PrefixLink, link)
+ linkDir = pjoin(d.PrefixLink, linkDir)
+ }
+
+ } else {
+ if d.Addends != "" {
+ pagePath = pjoin(pagePath, d.Addends)
+ }
+
+ needsBase = needsBase && d.Addends == ""
+
+ // No permalink expansion etc. for node type pages (for now)
+ base := ""
+
+ if needsBase || !isUgly {
+ base = d.Type.BaseName
+ }
+
+ pagePathDir = pagePath
+ link = pagePath
+ linkDir = pagePathDir
+
+ if base != "" {
+ pagePath = path.Join(pagePath, addSuffix(base, d.Type.MediaType.FullSuffix()))
+ } else {
+ pagePath = addSuffix(pagePath, d.Type.MediaType.FullSuffix())
+
+ }
+
+ if !isHtmlIndex(pagePath) {
+ link = pagePath
+ } else {
+ link += slash
+ }
+
+ if d.PrefixFilePath != "" {
+ pagePath = pjoin(d.PrefixFilePath, pagePath)
+ pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+ }
+
+ if d.PrefixLink != "" {
+ link = pjoin(d.PrefixLink, link)
+ linkDir = pjoin(d.PrefixLink, linkDir)
+ }
+ }
+
+ pagePath = pjoin(slash, pagePath)
+ pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash)
+
+ hadSlash := strings.HasSuffix(link, slash)
+ link = strings.Trim(link, slash)
+ if hadSlash {
+ link += slash
+ }
+
+ if !strings.HasPrefix(link, slash) {
+ link = slash + link
+ }
+
+ linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash)
+
+ // Note: MakePathSanitized will lower case the path if
+ // disablePathToLower isn't set.
+ pagePath = d.PathSpec.MakePathSanitized(pagePath)
+ pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir)
+ link = d.PathSpec.MakePathSanitized(link)
+ linkDir = d.PathSpec.MakePathSanitized(linkDir)
+
+ tp.TargetFilename = filepath.FromSlash(pagePath)
+ tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir)
+ tp.SubResourceBaseLink = linkDir
+ tp.Link = d.PathSpec.URLizeFilename(link)
+ if tp.Link == "" {
+ tp.Link = slash
+ }
+
+ return
+}
+
+func addSuffix(s, suffix string) string {
+ return strings.Trim(s, slash) + suffix
+}
+
+// Like path.Join, but preserves one trailing slash if present.
+func pjoin(elem ...string) string {
+ hadSlash := strings.HasSuffix(elem[len(elem)-1], slash)
+ joined := path.Join(elem...)
+ if hadSlash && !strings.HasSuffix(joined, slash) {
+ return joined + slash
+ }
+ return joined
+}
diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go
new file mode 100644
index 000000000..4aaa41e8a
--- /dev/null
+++ b/resources/page/page_paths_test.go
@@ -0,0 +1,258 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+
+ "fmt"
+
+ "github.com/gohugoio/hugo/output"
+)
+
+func TestPageTargetPath(t *testing.T) {
+
+ pathSpec := newTestPathSpec()
+
+ noExtNoDelimMediaType := media.TextType
+ noExtNoDelimMediaType.Suffixes = []string{}
+ noExtNoDelimMediaType.Delimiter = ""
+
+ // Netlify style _redirects
+ noExtDelimFormat := output.Format{
+ Name: "NER",
+ MediaType: noExtNoDelimMediaType,
+ BaseName: "_redirects",
+ }
+
+ for _, langPrefixPath := range []string{"", "no"} {
+ for _, langPrefixLink := range []string{"", "no"} {
+ for _, uglyURLs := range []bool{false, true} {
+
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ expected TargetPaths
+ }{
+ {"JSON home", TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}},
+ {"AMP home", TargetPathDescriptor{Kind: KindHome, Type: output.AMPFormat}, TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}},
+ {"HTML home", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}},
+ {"Netlify redirects", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: noExtDelimFormat}, TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}},
+ {"HTML section list", TargetPathDescriptor{
+ Kind: KindSection,
+ Sections: []string{"sect1"},
+ BaseName: "_index",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}},
+ {"HTML taxonomy list", TargetPathDescriptor{
+ Kind: KindTaxonomy,
+ Sections: []string{"tags", "hugo"},
+ BaseName: "_index",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}},
+ {"HTML taxonomy term", TargetPathDescriptor{
+ Kind: KindTaxonomy,
+ Sections: []string{"tags"},
+ BaseName: "_index",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}},
+ {
+ "HTML page", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ Sections: []string{"a"},
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"}},
+
+ {
+ "HTML page with index as base", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "index",
+ Sections: []string{"a"},
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"}},
+
+ {
+ "HTML page with special chars", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "My Page!",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"}},
+ {"RSS home", TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}},
+ {"RSS section list", TargetPathDescriptor{
+ Kind: "rss",
+ Sections: []string{"sect1"},
+ Type: output.RSSFormat}, TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}},
+ {
+ "AMP page", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b/c",
+ BaseName: "myamp",
+ Type: output.AMPFormat}, TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"}},
+ {
+ "AMP page with URL with suffix", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/url.xhtml",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"}},
+ {
+ "JSON page with URL without suffix", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path/",
+ Type: output.JSONFormat}, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}},
+ {
+ "JSON page with URL without suffix and no trailing slash", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path",
+ Type: output.JSONFormat}, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"}},
+ {
+ "HTML page with URL without suffix and no trailing slash", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"}},
+ {
+ "HTML page with expanded permalink", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ ExpandedPermalink: "/2017/10/my-title/",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"}},
+ {
+ "Paginated HTML home", TargetPathDescriptor{
+ Kind: KindHome,
+ BaseName: "_index",
+ Type: output.HTMLFormat,
+ Addends: "page/3"}, TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"}},
+ {
+ "Paginated Taxonomy list", TargetPathDescriptor{
+ Kind: KindTaxonomy,
+ BaseName: "_index",
+ Sections: []string{"tags", "hugo"},
+ Type: output.HTMLFormat,
+ Addends: "page/3"}, TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"}},
+ {
+ "Regular page with addend", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ Addends: "c/d/e",
+ Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"}},
+ }
+
+ for i, test := range tests {
+ t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name),
+ func(t *testing.T) {
+
+ test.d.ForcePrefix = true
+ test.d.PathSpec = pathSpec
+ test.d.UglyURLs = uglyURLs
+ test.d.PrefixFilePath = langPrefixPath
+ test.d.PrefixLink = langPrefixLink
+ test.d.Dir = filepath.FromSlash(test.d.Dir)
+ isUgly := uglyURLs && !test.d.Type.NoUgly
+
+ expected := test.expected
+
+ // TODO(bep) simplify
+ if test.d.Kind == KindPage && test.d.BaseName == test.d.Type.BaseName {
+ } else if test.d.Kind == KindHome && test.d.Type.Path != "" {
+ } else if test.d.Type.MediaType.Suffix() != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
+ expected.TargetFilename = strings.Replace(expected.TargetFilename,
+ "/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.Suffix(),
+ "."+test.d.Type.MediaType.Suffix(), 1)
+ expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.Suffix()
+
+ }
+
+ if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) {
+ expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename
+ expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget
+ }
+
+ if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) {
+ expected.Link = "/" + test.d.PrefixLink + expected.Link
+ }
+
+ expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+ expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+ pagePath := CreateTargetPaths(test.d)
+
+ if !eqTargetPaths(pagePath, expected) {
+ t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+
+ }
+ })
+ }
+ }
+
+ }
+ }
+}
+
+func TestPageTargetPathPrefix(t *testing.T) {
+ pathSpec := newTestPathSpec()
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ expected TargetPaths
+ }{
+ {"URL set, prefix both, no force", TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"},
+ TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"}},
+ {"URL set, prefix both, force", TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"},
+ TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"}},
+ }
+
+ for i, test := range tests {
+ t.Run(fmt.Sprintf(test.name),
+ func(t *testing.T) {
+ test.d.PathSpec = pathSpec
+ expected := test.expected
+ expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+ expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+ pagePath := CreateTargetPaths(test.d)
+
+ if pagePath != expected {
+ t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+ }
+ })
+ }
+
+}
+
+func eqTargetPaths(p1, p2 TargetPaths) bool {
+
+ if p1.Link != p2.Link {
+ return false
+ }
+
+ if p1.SubResourceBaseTarget != p2.SubResourceBaseTarget {
+ return false
+ }
+
+ if p1.TargetFilename != p2.TargetFilename {
+ return false
+ }
+
+ return true
+}
diff --git a/resources/page/page_wrappers.autogen.go b/resources/page/page_wrappers.autogen.go
new file mode 100644
index 000000000..c08da3e8b
--- /dev/null
+++ b/resources/page/page_wrappers.autogen.go
@@ -0,0 +1,97 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/helpers"
+ "html/template"
+ "os"
+)
+
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+ return &pageDeprecated{p: p}
+}
+
+type pageDeprecated struct {
+ p DeprecatedWarningPageMethods
+}
+
+func (p *pageDeprecated) Filename() string {
+ helpers.Deprecated("Page", ".Filename", "Use .File.Filename", false)
+ return p.p.Filename()
+}
+func (p *pageDeprecated) Dir() string {
+ helpers.Deprecated("Page", ".Dir", "Use .File.Dir", false)
+ return p.p.Dir()
+}
+func (p *pageDeprecated) IsDraft() bool {
+ helpers.Deprecated("Page", ".IsDraft", "Use .Draft.", false)
+ return p.p.IsDraft()
+}
+func (p *pageDeprecated) Extension() string {
+ helpers.Deprecated("Page", ".Extension", "Use .File.Extension", false)
+ return p.p.Extension()
+}
+func (p *pageDeprecated) Hugo() hugo.Info {
+ helpers.Deprecated("Page", ".Hugo", "Use the global hugo function.", false)
+ return p.p.Hugo()
+}
+func (p *pageDeprecated) Ext() string {
+ helpers.Deprecated("Page", ".Ext", "Use .File.Ext", false)
+ return p.p.Ext()
+}
+func (p *pageDeprecated) LanguagePrefix() string {
+ helpers.Deprecated("Page", ".LanguagePrefix", "Use .Site.LanguagePrefix.", false)
+ return p.p.LanguagePrefix()
+}
+func (p *pageDeprecated) GetParam(arg0 string) interface{} {
+ helpers.Deprecated("Page", ".GetParam", "Use .Param or .Params.myParam.", false)
+ return p.p.GetParam(arg0)
+}
+func (p *pageDeprecated) LogicalName() string {
+ helpers.Deprecated("Page", ".LogicalName", "Use .File.LogicalName", false)
+ return p.p.LogicalName()
+}
+func (p *pageDeprecated) BaseFileName() string {
+ helpers.Deprecated("Page", ".BaseFileName", "Use .File.BaseFileName", false)
+ return p.p.BaseFileName()
+}
+func (p *pageDeprecated) RSSLink() template.URL {
+ helpers.Deprecated("Page", ".RSSLink", "Use the Output Format's link, e.g. something like: \n {{ with .OutputFormats.Get \"RSS\" }}{{ . RelPermalink }}{{ end }}", false)
+ return p.p.RSSLink()
+}
+func (p *pageDeprecated) TranslationBaseName() string {
+ helpers.Deprecated("Page", ".TranslationBaseName", "Use .File.TranslationBaseName", false)
+ return p.p.TranslationBaseName()
+}
+func (p *pageDeprecated) URL() string {
+ helpers.Deprecated("Page", ".URL", "Use .Permalink or .RelPermalink. If what you want is the front matter URL value, use .Params.url", false)
+ return p.p.URL()
+}
+func (p *pageDeprecated) ContentBaseName() string {
+ helpers.Deprecated("Page", ".ContentBaseName", "Use .File.ContentBaseName", false)
+ return p.p.ContentBaseName()
+}
+func (p *pageDeprecated) UniqueID() string {
+ helpers.Deprecated("Page", ".UniqueID", "Use .File.UniqueID", false)
+ return p.p.UniqueID()
+}
+func (p *pageDeprecated) FileInfo() os.FileInfo {
+ helpers.Deprecated("Page", ".FileInfo", "Use .File.FileInfo", false)
+ return p.p.FileInfo()
+}
diff --git a/resources/page/pagegroup.go b/resources/page/pagegroup.go
new file mode 100644
index 000000000..46d9bd174
--- /dev/null
+++ b/resources/page/pagegroup.go
@@ -0,0 +1,369 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/collections"
+
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
var (
	// Compile-time proof that PageGroup satisfies collections.Slicer,
	// so it can be used with the slice-related template functions.
	_ collections.Slicer = PageGroup{}
)

// PageGroup represents a group of pages, grouped by the key.
// The key is typically a year or similar.
type PageGroup struct {
	// Key is the value this group was formed on (a field/method result,
	// a param value or a formatted date string, depending on the grouping).
	Key interface{}
	Pages
}
+
// mapKeyValues is sort plumbing for the reflect.Value map keys produced by
// GroupBy and GroupByParam.
type mapKeyValues []reflect.Value

func (v mapKeyValues) Len() int      { return len(v) }
func (v mapKeyValues) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// mapKeyByInt orders keys holding integer values.
type mapKeyByInt struct{ mapKeyValues }

func (s mapKeyByInt) Less(i, j int) bool { return s.mapKeyValues[i].Int() < s.mapKeyValues[j].Int() }

// mapKeyByStr orders keys holding string values.
type mapKeyByStr struct{ mapKeyValues }

func (s mapKeyByStr) Less(i, j int) bool {
	return s.mapKeyValues[i].String() < s.mapKeyValues[j].String()
}
+
+func sortKeys(v []reflect.Value, order string) []reflect.Value {
+ if len(v) <= 1 {
+ return v
+ }
+
+ switch v[0].Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ if order == "desc" {
+ sort.Sort(sort.Reverse(mapKeyByInt{v}))
+ } else {
+ sort.Sort(mapKeyByInt{v})
+ }
+ case reflect.String:
+ if order == "desc" {
+ sort.Sort(sort.Reverse(mapKeyByStr{v}))
+ } else {
+ sort.Sort(mapKeyByStr{v})
+ }
+ }
+ return v
+}
+
+// PagesGroup represents a list of page groups.
+// This is what you get when doing page grouping in the templates.
+type PagesGroup []PageGroup
+
+// Reverse reverses the order of this list of page groups.
+func (p PagesGroup) Reverse() PagesGroup {
+ for i, j := 0, len(p)-1; i < j; i, j = i+1, j-1 {
+ p[i], p[j] = p[j], p[i]
+ }
+
+ return p
+}
+
var (
	// errorType is used to detect (and reject) methods returning errors.
	errorType = reflect.TypeOf((*error)(nil)).Elem()
	// pagePtrType is derived from a *Page pointer and denotes the Page type
	// itself; used for method/field lookups in GroupBy.
	pagePtrType = reflect.TypeOf((*Page)(nil)).Elem()
	// pagesType is the reflect type of Pages, used to build grouping maps.
	pagesType = reflect.TypeOf(Pages{})
)
+
// GroupBy groups by the value in the given field or method name and with the given order.
// Valid values for order is asc, desc, rev and reverse.
func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
	if len(p) < 1 {
		return nil, nil
	}

	direction := "asc"

	if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
		direction = "desc"
	}

	// Resolve key as either a method on Page or a struct field, rejecting
	// method shapes that cannot be grouped on: no return values, more than
	// two, a lone error return, or a second return that is not an error.
	var ft interface{}
	m, ok := pagePtrType.MethodByName(key)
	if ok {
		if m.Type.NumOut() == 0 || m.Type.NumOut() > 2 {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		if m.Type.NumOut() == 1 && m.Type.Out(0).Implements(errorType) {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		if m.Type.NumOut() == 2 && !m.Type.Out(1).Implements(errorType) {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		ft = m
	} else {
		// NOTE(review): reflect.Type.Elem panics for interface kinds, so if
		// Page is an interface this field fallback can only panic, never
		// succeed — confirm whether Page is an interface or a pointer here.
		ft, ok = pagePtrType.Elem().FieldByName(key)
		if !ok {
			return nil, errors.New(key + " is neither a field nor a method of Page")
		}
	}

	// Build a map[keyType]Pages keyed on the field or method result.
	var tmp reflect.Value
	switch e := ft.(type) {
	case reflect.StructField:
		tmp = reflect.MakeMap(reflect.MapOf(e.Type, pagesType))
	case reflect.Method:
		tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), pagesType))
	}

	for _, e := range p {
		ppv := reflect.ValueOf(e)
		var fv reflect.Value
		switch ft.(type) {
		case reflect.StructField:
			fv = ppv.Elem().FieldByName(key)
		case reflect.Method:
			fv = ppv.MethodByName(key).Call([]reflect.Value{})[0]
		}
		// Pages with an invalid (unobtainable) value are silently skipped.
		if !fv.IsValid() {
			continue
		}
		if !tmp.MapIndex(fv).IsValid() {
			tmp.SetMapIndex(fv, reflect.MakeSlice(pagesType, 0, 0))
		}
		tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
	}

	// Sort the keys and flatten the map into an ordered group list.
	sortedKeys := sortKeys(tmp.MapKeys(), direction)
	r := make([]PageGroup, len(sortedKeys))
	for i, k := range sortedKeys {
		r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)}
	}

	return r, nil
}
+
+// GroupByParam groups by the given page parameter key's value and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+func (p Pages) GroupByParam(key string, order ...string) (PagesGroup, error) {
+ if len(p) < 1 {
+ return nil, nil
+ }
+
+ direction := "asc"
+
+ if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
+ direction = "desc"
+ }
+
+ var tmp reflect.Value
+ var keyt reflect.Type
+ for _, e := range p {
+ param := resource.GetParamToLower(e, key)
+ if param != nil {
+ if _, ok := param.([]string); !ok {
+ keyt = reflect.TypeOf(param)
+ tmp = reflect.MakeMap(reflect.MapOf(keyt, pagesType))
+ break
+ }
+ }
+ }
+ if !tmp.IsValid() {
+ return nil, errors.New("there is no such a param")
+ }
+
+ for _, e := range p {
+ param := resource.GetParam(e, key)
+
+ if param == nil || reflect.TypeOf(param) != keyt {
+ continue
+ }
+ v := reflect.ValueOf(param)
+ if !tmp.MapIndex(v).IsValid() {
+ tmp.SetMapIndex(v, reflect.MakeSlice(pagesType, 0, 0))
+ }
+ tmp.SetMapIndex(v, reflect.Append(tmp.MapIndex(v), reflect.ValueOf(e)))
+ }
+
+ var r []PageGroup
+ for _, k := range sortKeys(tmp.MapKeys(), direction) {
+ r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)})
+ }
+
+ return r, nil
+}
+
// groupByDateField is the shared implementation behind the date-based
// groupings: sort the pages with sorter, then walk them in order and start
// a new group whenever the formatted date string changes.
func (p Pages) groupByDateField(sorter func(p Pages) Pages, formatter func(p Page) string, order ...string) (PagesGroup, error) {
	if len(p) < 1 {
		return nil, nil
	}

	sp := sorter(p)

	// Default is descending (newest first); only keep the sorter's
	// ascending order when explicitly asked for.
	if !(len(order) > 0 && (strings.ToLower(order[0]) == "asc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse")) {
		sp = sp.Reverse()
	}

	// Seed the result with the first page's group...
	date := formatter(sp[0].(Page))
	var r []PageGroup
	r = append(r, PageGroup{Key: date, Pages: make(Pages, 0)})
	r[0].Pages = append(r[0].Pages, sp[0])

	// ...then append to the current group until the formatted key changes.
	i := 0
	for _, e := range sp[1:] {
		date = formatter(e.(Page))
		if r[i].Key.(string) != date {
			r = append(r, PageGroup{Key: date})
			i++
		}
		r[i].Pages = append(r[i].Pages, e)
	}
	return r, nil
}
+
+// GroupByDate groups by the given page's Date value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByDate()
+ }
+ formatter := func(p Page) string {
+ return p.Date().Format(format)
+ }
+ return p.groupByDateField(sorter, formatter, order...)
+}
+
+// GroupByPublishDate groups by the given page's PublishDate value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByPublishDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByPublishDate()
+ }
+ formatter := func(p Page) string {
+ return p.PublishDate().Format(format)
+ }
+ return p.groupByDateField(sorter, formatter, order...)
+}
+
+// GroupByExpiryDate groups by the given page's ExpireDate value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByExpiryDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByExpiryDate()
+ }
+ formatter := func(p Page) string {
+ return p.ExpiryDate().Format(format)
+ }
+ return p.groupByDateField(sorter, formatter, order...)
+}
+
// GroupByParamDate groups by a date set as a param on the page in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByParamDate(key string, format string, order ...string) (PagesGroup, error) {
	// The sorter first filters out pages whose param is missing or not a
	// time.Time, so the type assertions below cannot fail on its output.
	sorter := func(p Pages) Pages {
		var r Pages
		for _, e := range p {
			param := resource.GetParamToLower(e, key)
			if _, ok := param.(time.Time); ok {
				r = append(r, e)
			}
		}
		pdate := func(p1, p2 Page) bool {
			p1p, p2p := p1.(Page), p2.(Page)
			return resource.GetParamToLower(p1p, key).(time.Time).Unix() < resource.GetParamToLower(p2p, key).(time.Time).Unix()
		}
		pageBy(pdate).Sort(r)
		return r
	}
	formatter := func(p Page) string {
		return resource.GetParamToLower(p, key).(time.Time).Format(format)
	}
	return p.groupByDateField(sorter, formatter, order...)
}
+
+// Slice is not meant to be used externally. It's a bridge function
+// for the template functions. See collections.Slice.
+func (p PageGroup) Slice(in interface{}) (interface{}, error) {
+ switch items := in.(type) {
+ case PageGroup:
+ return items, nil
+ case []interface{}:
+ groups := make(PagesGroup, len(items))
+ for i, v := range items {
+ g, ok := v.(PageGroup)
+ if !ok {
+ return nil, fmt.Errorf("type %T is not a PageGroup", v)
+ }
+ groups[i] = g
+ }
+ return groups, nil
+ default:
+ return nil, fmt.Errorf("invalid slice type %T", items)
+ }
+}
+
+// Len returns the number of pages in the page group.
+func (psg PagesGroup) Len() int {
+ l := 0
+ for _, pg := range psg {
+ l += len(pg.Pages)
+ }
+ return l
+}
+
// ToPagesGroup tries to convert seq into a PagesGroup.
// It handles nil, PagesGroup, []PageGroup and []interface{} holding
// PageGroups. Any other input yields (nil, nil) — no error — so callers
// must be prepared for a nil result on unsupported types.
func ToPagesGroup(seq interface{}) (PagesGroup, error) {
	switch v := seq.(type) {
	case nil:
		return nil, nil
	case PagesGroup:
		return v, nil
	case []PageGroup:
		return PagesGroup(v), nil
	case []interface{}:
		l := len(v)
		if l == 0 {
			break
		}
		// The first element decides whether this is a PageGroup slice;
		// a mixed slice is then an error.
		switch v[0].(type) {
		case PageGroup:
			pagesGroup := make(PagesGroup, l)
			for i, ipg := range v {
				if pg, ok := ipg.(PageGroup); ok {
					pagesGroup[i] = pg
				} else {
					return nil, fmt.Errorf("unsupported type in paginate from slice, got %T instead of PageGroup", ipg)
				}
			}
			return pagesGroup, nil
		}
	}

	return nil, nil
}
diff --git a/resources/page/pagegroup_test.go b/resources/page/pagegroup_test.go
new file mode 100644
index 000000000..51ac09034
--- /dev/null
+++ b/resources/page/pagegroup_test.go
@@ -0,0 +1,409 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/spf13/cast"
+ "github.com/stretchr/testify/require"
+)
+
// pageGroupTestObject describes one synthetic page used by the grouping
// tests: its path, weight, one date string (applied to date, pubDate and
// expiryDate alike) and a string param value.
type pageGroupTestObject struct {
	path   string
	weight int
	date   string
	param  string
}

var pageGroupTestSources = []pageGroupTestObject{
	{"/section1/testpage1.md", 3, "2012-04-06", "foo"},
	{"/section1/testpage2.md", 3, "2012-01-01", "bar"},
	{"/section1/testpage3.md", 2, "2012-04-06", "foo"},
	{"/section2/testpage4.md", 1, "2012-03-02", "bar"},
	{"/section2/testpage5.md", 1, "2012-04-06", "baz"},
}

// preparePageGroupTestPages builds a Pages fixture from
// pageGroupTestSources, deriving the section from the first path segment
// and populating the custom_param/custom_date params.
func preparePageGroupTestPages(t *testing.T) Pages {
	var pages Pages
	for _, src := range pageGroupTestSources {
		p := newTestPage()
		p.path = src.path
		if p.path != "" {
			p.section = strings.Split(strings.TrimPrefix(p.path, "/"), "/")[0]
		}
		p.weight = src.weight
		p.date = cast.ToTime(src.date)
		p.pubDate = cast.ToTime(src.date)
		p.expiryDate = cast.ToTime(src.date)
		p.params["custom_param"] = src.param
		p.params["custom_date"] = cast.ToTime(src.date)
		pages = append(pages, p)
	}
	return pages
}
+
// TestGroupByWithFieldNameArg groups on a field name; default key order is
// ascending.
func TestGroupByWithFieldNameArg(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: 1, Pages: Pages{pages[3], pages[4]}},
		{Key: 2, Pages: Pages{pages[2]}},
		{Key: 3, Pages: Pages{pages[0], pages[1]}},
	}

	groups, err := pages.GroupBy("Weight")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByWithMethodNameArg groups on a method name (Type).
func TestGroupByWithMethodNameArg(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
	}

	groups, err := pages.GroupBy("Type")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByWithSectionArg groups on the Section method.
func TestGroupByWithSectionArg(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
	}

	groups, err := pages.GroupBy("Section")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be\n%#v, got\n%#v", expect, groups)
	}
}

// TestGroupByInReverseOrder checks the "desc" key ordering.
func TestGroupByInReverseOrder(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: 3, Pages: Pages{pages[0], pages[1]}},
		{Key: 2, Pages: Pages{pages[2]}},
		{Key: 1, Pages: Pages{pages[3], pages[4]}},
	}

	groups, err := pages.GroupBy("Weight", "desc")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}
+
// TestGroupByCalledWithEmptyPages: an empty Pages yields nil, nil.
func TestGroupByCalledWithEmptyPages(t *testing.T) {
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupBy("Weight")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if groups != nil {
		t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
	}
}

// TestGroupByParamCalledWithUnavailableKey: a param key no page carries
// must produce an error.
func TestGroupByParamCalledWithUnavailableKey(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	_, err := pages.GroupByParam("UnavailableKey")
	if err == nil {
		t.Errorf("GroupByParam should return an error but didn't")
	}
}

// TestReverse: GroupBy(..., "desc") must equal GroupBy(...).Reverse().
func TestReverse(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)

	groups1, err := pages.GroupBy("Weight", "desc")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}

	groups2, err := pages.GroupBy("Weight")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	groups2 = groups2.Reverse()

	if !reflect.DeepEqual(groups2, groups1) {
		t.Errorf("PagesGroup is sorted in unexpected order. It should be %#v, got %#v", groups2, groups1)
	}
}
+
// TestGroupByParam groups on the custom_param value, ascending by key.
func TestGroupByParam(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "bar", Pages: Pages{pages[1], pages[3]}},
		{Key: "baz", Pages: Pages{pages[4]}},
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
	}

	groups, err := pages.GroupByParam("custom_param")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByParamInReverseOrder checks the "desc" key ordering for params.
func TestGroupByParamInReverseOrder(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
		{Key: "baz", Pages: Pages{pages[4]}},
		{Key: "bar", Pages: Pages{pages[1], pages[3]}},
	}

	groups, err := pages.GroupByParam("custom_param", "desc")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}
+
+func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
+ assert := require.New(t)
+ testStr := "TestString"
+ p := newTestPage()
+ p.params["custom_param"] = testStr
+ pages := Pages{p}
+
+ groups, err := pages.GroupByParam("custom_param")
+
+ assert.NoError(err)
+ assert.Equal(testStr, groups[0].Key)
+
+}
+
// TestGroupByParamCalledWithSomeUnavailableParams: pages missing the param
// are silently dropped from the groups.
func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	delete(pages[1].Params(), "custom_param")
	delete(pages[3].Params(), "custom_param")
	delete(pages[4].Params(), "custom_param")

	expect := PagesGroup{
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
	}

	groups, err := pages.GroupByParam("custom_param")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByParamCalledWithEmptyPages: an empty Pages yields nil, nil.
func TestGroupByParamCalledWithEmptyPages(t *testing.T) {
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByParam("custom_param")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if groups != nil {
		t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
	}
}

// TestGroupByParamCalledWithUnavailableParam: a param key no page carries
// must produce an error.
func TestGroupByParamCalledWithUnavailableParam(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	_, err := pages.GroupByParam("unavailable_param")
	if err == nil {
		t.Errorf("GroupByParam should return an error but didn't")
	}
}
+
// TestGroupByDate: default date grouping is newest group first.
func TestGroupByDate(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}

	groups, err := pages.GroupByDate("2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByDateInReverseOrder: "asc" flips to oldest group first.
func TestGroupByDateInReverseOrder(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-01", Pages: Pages{pages[1]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}

	groups, err := pages.GroupByDate("2006-01", "asc")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByPublishDate mirrors TestGroupByDate for PublishDate.
func TestGroupByPublishDate(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}

	groups, err := pages.GroupByPublishDate("2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}
+
+func TestGroupByPublishDateInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
+ }
+
+ groups, err := pages.GroupByDate("2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
// TestGroupByPublishDateWithEmptyPages: an empty Pages yields nil, nil.
func TestGroupByPublishDateWithEmptyPages(t *testing.T) {
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByPublishDate("2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if groups != nil {
		t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
	}
}

// TestGroupByExpiryDate mirrors the date grouping tests for ExpiryDate.
func TestGroupByExpiryDate(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}

	groups, err := pages.GroupByExpiryDate("2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByParamDate groups on the custom_date time.Time param.
func TestGroupByParamDate(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}

	groups, err := pages.GroupByParamDate("custom_date", "2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByParamDateInReverseOrder: "asc" flips the param-date grouping.
func TestGroupByParamDateInReverseOrder(t *testing.T) {
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-01", Pages: Pages{pages[1]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}

	groups, err := pages.GroupByParamDate("custom_date", "2006-01", "asc")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if !reflect.DeepEqual(groups, expect) {
		t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
	}
}

// TestGroupByParamDateWithEmptyPages: an empty Pages yields nil, nil.
func TestGroupByParamDateWithEmptyPages(t *testing.T) {
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByParamDate("custom_date", "2006-01")
	if err != nil {
		t.Fatalf("Unable to make PagesGroup array: %s", err)
	}
	if groups != nil {
		t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
	}
}
diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
new file mode 100644
index 000000000..1ce3fbee4
--- /dev/null
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -0,0 +1,427 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/cast"
+)
+
// FrontMatterHandler maps front matter into Page fields and .Params.
// Note that we currently have only extracted the date logic.
type FrontMatterHandler struct {
	fmConfig frontmatterConfig

	// One handler chain per date field; each chain tries its configured
	// sources in order and the first success wins.
	dateHandler        frontMatterFieldHandler
	lastModHandler     frontMatterFieldHandler
	publishDateHandler frontMatterFieldHandler
	expiryDateHandler  frontMatterFieldHandler

	// A map of all date keys configured, including any custom.
	allDateKeys map[string]bool

	logger *loggers.Logger
}
+
// FrontMatterDescriptor describes how to handle front matter for a given Page.
// It has pointers to values in the receiving page which gets updated.
type FrontMatterDescriptor struct {

	// This is the Page's front matter.
	Frontmatter map[string]interface{}

	// This is the Page's base filename (BaseFilename), e.g. page.md., or
	// if page is a leaf bundle, the bundle folder name (ContentBaseName).
	BaseFilename string

	// The content file's mod time.
	ModTime time.Time

	// May be set from the author date in Git.
	GitAuthorDate time.Time

	// The below are pointers to values on Page and will be modified.

	// This is the Page's params.
	Params map[string]interface{}

	// This is the Page's dates.
	Dates *resource.Dates

	// This is the Page's Slug etc.
	PageURLs *URLPath
}
+
var (
	// dateFieldAliases maps each canonical date front matter key to the
	// alternative spellings treated as equivalent.
	dateFieldAliases = map[string][]string{
		fmDate:       {},
		fmLastmod:    {"modified"},
		fmPubDate:    {"pubdate", "published"},
		fmExpiryDate: {"unpublishdate"},
	}
)
+
+// HandleDates updates all the dates given the current configuration and the
+// supplied front matter params. Note that this requires all lower-case keys
+// in the params map.
+func (f FrontMatterHandler) HandleDates(d *FrontMatterDescriptor) error {
+ if d.Dates == nil {
+ panic("missing dates")
+ }
+
+ if f.dateHandler == nil {
+ panic("missing date handler")
+ }
+
+ if _, err := f.dateHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.lastModHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.publishDateHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.expiryDateHandler(d); err != nil {
+ return err
+ }
+
+ return nil
+}
+
// IsDateKey returns whether the given front matter key is considered a date by the current
// configuration.
func (f FrontMatterHandler) IsDateKey(key string) bool {
	// Unknown keys yield the zero value, false.
	return f.allDateKeys[key]
}
+
// dateAndSlugFromBaseFilename extracts a leading YYYY-MM-DD date and the
// remaining slug from a content filename.
// A Zero date is a signal that the name can not be parsed.
// This follows the format as outlined in Jekyll, https://jekyllrb.com/docs/posts/:
// "Where YEAR is a four-digit number, MONTH and DAY are both two-digit numbers"
func dateAndSlugFromBaseFilename(name string) (time.Time, string) {
	withoutExt, _ := helpers.FileAndExt(name)

	if len(withoutExt) < 10 {
		// This can not be a date.
		return time.Time{}, ""
	}

	// Note: Hugo currently have no custom timezone support.
	// We will have to revisit this when that is in place.
	d, err := time.Parse("2006-01-02", withoutExt[:10])
	if err != nil {
		return time.Time{}, ""
	}

	// Be a little lenient with the format here: the date may be separated
	// from the slug by "-", "_", a space, or nothing at all.
	slug := strings.Trim(withoutExt[10:], " -_")

	return d, slug
}
+
// frontMatterFieldHandler tries to populate one date field from one source;
// it reports whether it succeeded.
type frontMatterFieldHandler func(d *FrontMatterDescriptor) (bool, error)

// newChainedFrontMatterFieldHandler combines handlers into one: they are
// tried in order and the first success wins. Handler errors are logged,
// not propagated, so a bad value does not stop the rest of the chain.
func (f FrontMatterHandler) newChainedFrontMatterFieldHandler(handlers ...frontMatterFieldHandler) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		for _, h := range handlers {
			// First successful handler wins.
			success, err := h(d)
			if err != nil {
				f.logger.ERROR.Println(err)
			} else if success {
				return true, nil
			}
		}
		return false, nil
	}
}
+
// frontmatterConfig holds, for each date field, the configured list of
// source identifiers to try, in priority order.
type frontmatterConfig struct {
	date        []string
	lastmod     []string
	publishDate []string
	expiryDate  []string
}
+
const (
	// These are all the date handler identifiers
	// All identifiers not starting with a ":" maps to a front matter parameter.
	fmDate       = "date"
	fmPubDate    = "publishdate"
	fmLastmod    = "lastmod"
	fmExpiryDate = "expirydate"

	// Gets date from filename, e.g. 2018-02-22-mypage.md
	fmFilename = ":filename"

	// Gets date from file OS mod time.
	fmModTime = ":filemodtime"

	// Gets date from Git
	fmGitAuthorDate = ":git"
)
+
// newDefaultFrontmatterConfig returns the config you get when doing
// nothing: each date field falls back through the listed sources in order.
func newDefaultFrontmatterConfig() frontmatterConfig {
	return frontmatterConfig{
		date:        []string{fmDate, fmPubDate, fmLastmod},
		lastmod:     []string{fmGitAuthorDate, fmLastmod, fmDate, fmPubDate},
		publishDate: []string{fmPubDate, fmDate},
		expiryDate:  []string{fmExpiryDate},
	}
}
+
// newFrontmatterConfig builds the frontmatter date configuration from the
// site config, overlaying any "frontmatter" section over the defaults,
// expanding ":default" placeholders and adding date key aliases.
// NOTE(review): the error return is currently always nil.
func newFrontmatterConfig(cfg config.Provider) (frontmatterConfig, error) {
	c := newDefaultFrontmatterConfig()
	defaultConfig := c

	if cfg.IsSet("frontmatter") {
		fm := cfg.GetStringMap("frontmatter")
		for k, v := range fm {
			loki := strings.ToLower(k)
			switch loki {
			case fmDate:
				c.date = toLowerSlice(v)
			case fmPubDate:
				c.publishDate = toLowerSlice(v)
			case fmLastmod:
				c.lastmod = toLowerSlice(v)
			case fmExpiryDate:
				c.expiryDate = toLowerSlice(v)
			}
		}
	}

	// Replace any ":default" entry with the built-in list for that field,
	// then expand known aliases (e.g. "modified" for lastmod).
	expander := func(c, d []string) []string {
		out := expandDefaultValues(c, d)
		out = addDateFieldAliases(out)
		return out
	}

	c.date = expander(c.date, defaultConfig.date)
	c.publishDate = expander(c.publishDate, defaultConfig.publishDate)
	c.lastmod = expander(c.lastmod, defaultConfig.lastmod)
	c.expiryDate = expander(c.expiryDate, defaultConfig.expiryDate)

	return c, nil
}
+
+func addDateFieldAliases(values []string) []string {
+ var complete []string
+
+ for _, v := range values {
+ complete = append(complete, v)
+ if aliases, found := dateFieldAliases[v]; found {
+ complete = append(complete, aliases...)
+ }
+ }
+ return helpers.UniqueStrings(complete)
+}
+
// expandDefaultValues replaces every ":default" entry in values with the
// given defaults, leaving all other entries in place.
func expandDefaultValues(values []string, defaults []string) []string {
	var expanded []string
	for _, val := range values {
		switch val {
		case ":default":
			expanded = append(expanded, defaults...)
		default:
			expanded = append(expanded, val)
		}
	}
	return expanded
}
+
+func toLowerSlice(in interface{}) []string {
+ out := cast.ToStringSlice(in)
+ for i := 0; i < len(out); i++ {
+ out[i] = strings.ToLower(out[i])
+ }
+
+ return out
+}
+
// NewFrontmatterHandler creates a new FrontMatterHandler with the given logger and configuration.
// If no logger is provided, one will be created.
func NewFrontmatterHandler(logger *loggers.Logger, cfg config.Provider) (FrontMatterHandler, error) {

	if logger == nil {
		logger = loggers.NewErrorLogger()
	}

	frontMatterConfig, err := newFrontmatterConfig(cfg)
	if err != nil {
		return FrontMatterHandler{}, err
	}

	// Collect every configured front matter key (entries starting with ":"
	// are synthetic sources like :git, not front matter keys).
	allDateKeys := make(map[string]bool)
	addKeys := func(vals []string) {
		for _, k := range vals {
			if !strings.HasPrefix(k, ":") {
				allDateKeys[k] = true
			}
		}
	}

	addKeys(frontMatterConfig.date)
	addKeys(frontMatterConfig.expiryDate)
	addKeys(frontMatterConfig.lastmod)
	addKeys(frontMatterConfig.publishDate)

	f := FrontMatterHandler{logger: logger, fmConfig: frontMatterConfig, allDateKeys: allDateKeys}

	// Build the per-field handler chains from the configuration.
	if err := f.createHandlers(); err != nil {
		return f, err
	}

	return f, nil
}
+
// createHandlers builds the four date handler chains. Each setter both
// updates the Dates struct and mirrors the value into Params under the
// canonical key (only if the user did not set it explicitly).
func (f *FrontMatterHandler) createHandlers() error {
	var err error

	if f.dateHandler, err = f.createDateHandler(f.fmConfig.date,
		func(d *FrontMatterDescriptor, t time.Time) {
			d.Dates.FDate = t
			setParamIfNotSet(fmDate, t, d)
		}); err != nil {
		return err
	}

	if f.lastModHandler, err = f.createDateHandler(f.fmConfig.lastmod,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmLastmod, t, d)
			d.Dates.FLastmod = t
		}); err != nil {
		return err
	}

	if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.publishDate,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmPubDate, t, d)
			d.Dates.FPublishDate = t
		}); err != nil {
		return err
	}

	if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.expiryDate,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmExpiryDate, t, d)
			d.Dates.FExpiryDate = t
		}); err != nil {
		return err
	}

	return nil
}
+
+func setParamIfNotSet(key string, value interface{}, d *FrontMatterDescriptor) {
+ if _, found := d.Params[key]; found {
+ return
+ }
+ d.Params[key] = value
+}
+
// createDateHandler chains one handler per configured identifier: the
// special ":"-prefixed sources get dedicated handlers, everything else is
// treated as a front matter key.
// NOTE(review): the error return is currently always nil.
func (f FrontMatterHandler) createDateHandler(identifiers []string, setter func(d *FrontMatterDescriptor, t time.Time)) (frontMatterFieldHandler, error) {
	// h stays nil; the frontmatterFieldHandlers methods never dereference
	// their receiver, so a nil pointer is safe here.
	var h *frontmatterFieldHandlers
	var handlers []frontMatterFieldHandler

	for _, identifier := range identifiers {
		switch identifier {
		case fmFilename:
			handlers = append(handlers, h.newDateFilenameHandler(setter))
		case fmModTime:
			handlers = append(handlers, h.newDateModTimeHandler(setter))
		case fmGitAuthorDate:
			handlers = append(handlers, h.newDateGitAuthorDateHandler(setter))
		default:
			handlers = append(handlers, h.newDateFieldHandler(identifier, setter))
		}
	}

	return f.newChainedFrontMatterFieldHandler(handlers...), nil

}
+
// frontmatterFieldHandlers is a stateless namespace for the date handler
// constructors; a nil pointer receiver is fine.
type frontmatterFieldHandlers int

// newDateFieldHandler reads the date from the given front matter key.
// An unparseable value is reported as "not handled" (false, nil) so the
// chain can try the next source.
func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		v, found := d.Frontmatter[key]

		if !found {
			return false, nil
		}

		date, err := cast.ToTimeE(v)
		if err != nil {
			// Deliberate best-effort: fall through to the next handler.
			return false, nil
		}

		// We map several date keys to one, so, for example,
		// "expirydate", "unpublishdate" will all set .ExpiryDate (first found).
		setter(d, date)

		// This is the params key as set in front matter.
		d.Params[key] = date

		return true, nil
	}
}
+
// newDateFilenameHandler extracts the date from a Jekyll-style filename
// (YYYY-MM-DD-slug.md). As a side effect it also sets the page slug from
// the filename remainder, unless front matter sets one explicitly.
func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		date, slug := dateAndSlugFromBaseFilename(d.BaseFilename)
		if date.IsZero() {
			return false, nil
		}

		setter(d, date)

		if _, found := d.Frontmatter["slug"]; !found {
			// Use slug from filename
			d.PageURLs.Slug = slug
		}

		return true, nil
	}
}
+
+func (f *frontmatterFieldHandlers) newDateModTimeHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ if d.ModTime.IsZero() {
+ return false, nil
+ }
+ setter(d, d.ModTime)
+ return true, nil
+ }
+}
+
+func (f *frontmatterFieldHandlers) newDateGitAuthorDateHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ if d.GitAuthorDate.IsZero() {
+ return false, nil
+ }
+ setter(d, d.GitAuthorDate)
+ return true, nil
+ }
+}
diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go
new file mode 100644
index 000000000..313f704d9
--- /dev/null
+++ b/resources/page/pagemeta/page_frontmatter_test.go
@@ -0,0 +1,262 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/viper"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestDateAndSlugFromBaseFilename(t *testing.T) {
+
+ t.Parallel()
+
+ assert := require.New(t)
+
+ tests := []struct {
+ name string
+ date string
+ slug string
+ }{
+ {"page.md", "0001-01-01", ""},
+ {"2012-09-12-page.md", "2012-09-12", "page"},
+ {"2018-02-28-page.md", "2018-02-28", "page"},
+ {"2018-02-28_page.md", "2018-02-28", "page"},
+ {"2018-02-28 page.md", "2018-02-28", "page"},
+ {"2018-02-28page.md", "2018-02-28", "page"},
+ {"2018-02-28-.md", "2018-02-28", ""},
+ {"2018-02-28-.md", "2018-02-28", ""},
+ {"2018-02-28.md", "2018-02-28", ""},
+ {"2018-02-28-page", "2018-02-28", "page"},
+ {"2012-9-12-page.md", "0001-01-01", ""},
+ {"asdfasdf.md", "0001-01-01", ""},
+ }
+
+ for i, test := range tests {
+ expecteFDate, err := time.Parse("2006-01-02", test.date)
+ assert.NoError(err)
+
+ errMsg := fmt.Sprintf("Test %d", i)
+ gotDate, gotSlug := dateAndSlugFromBaseFilename(test.name)
+
+ assert.Equal(expecteFDate, gotDate, errMsg)
+ assert.Equal(test.slug, gotSlug, errMsg)
+
+ }
+}
+
+func newTestFd() *FrontMatterDescriptor {
+ return &FrontMatterDescriptor{
+ Frontmatter: make(map[string]interface{}),
+ Params: make(map[string]interface{}),
+ Dates: &resource.Dates{},
+ PageURLs: &URLPath{},
+ }
+}
+
+func TestFrontMatterNewConfig(t *testing.T) {
+ assert := require.New(t)
+
+ cfg := viper.New()
+
+ cfg.Set("frontmatter", map[string]interface{}{
+ "date": []string{"publishDate", "LastMod"},
+ "Lastmod": []string{"publishDate"},
+ "expiryDate": []string{"lastMod"},
+ "publishDate": []string{"date"},
+ })
+
+ fc, err := newFrontmatterConfig(cfg)
+ assert.NoError(err)
+ assert.Equal([]string{"publishdate", "pubdate", "published", "lastmod", "modified"}, fc.date)
+ assert.Equal([]string{"publishdate", "pubdate", "published"}, fc.lastmod)
+ assert.Equal([]string{"lastmod", "modified"}, fc.expiryDate)
+ assert.Equal([]string{"date"}, fc.publishDate)
+
+ // Default
+ cfg = viper.New()
+ fc, err = newFrontmatterConfig(cfg)
+ assert.NoError(err)
+ assert.Equal([]string{"date", "publishdate", "pubdate", "published", "lastmod", "modified"}, fc.date)
+ assert.Equal([]string{":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"}, fc.lastmod)
+ assert.Equal([]string{"expirydate", "unpublishdate"}, fc.expiryDate)
+ assert.Equal([]string{"publishdate", "pubdate", "published", "date"}, fc.publishDate)
+
+ // :default keyword
+ cfg.Set("frontmatter", map[string]interface{}{
+ "date": []string{"d1", ":default"},
+ "lastmod": []string{"d2", ":default"},
+ "expiryDate": []string{"d3", ":default"},
+ "publishDate": []string{"d4", ":default"},
+ })
+ fc, err = newFrontmatterConfig(cfg)
+ assert.NoError(err)
+ assert.Equal([]string{"d1", "date", "publishdate", "pubdate", "published", "lastmod", "modified"}, fc.date)
+ assert.Equal([]string{"d2", ":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"}, fc.lastmod)
+ assert.Equal([]string{"d3", "expirydate", "unpublishdate"}, fc.expiryDate)
+ assert.Equal([]string{"d4", "publishdate", "pubdate", "published", "date"}, fc.publishDate)
+
+}
+
+func TestFrontMatterDatesHandlers(t *testing.T) {
+ assert := require.New(t)
+
+ for _, handlerID := range []string{":filename", ":fileModTime", ":git"} {
+
+ cfg := viper.New()
+
+ cfg.Set("frontmatter", map[string]interface{}{
+ "date": []string{handlerID, "date"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ assert.NoError(err)
+
+ d1, _ := time.Parse("2006-01-02", "2018-02-01")
+ d2, _ := time.Parse("2006-01-02", "2018-02-02")
+
+ d := newTestFd()
+ switch strings.ToLower(handlerID) {
+ case ":filename":
+ d.BaseFilename = "2018-02-01-page.md"
+ case ":filemodtime":
+ d.ModTime = d1
+ case ":git":
+ d.GitAuthorDate = d1
+ }
+ d.Frontmatter["date"] = d2
+ assert.NoError(handler.HandleDates(d))
+ assert.Equal(d1, d.Dates.FDate)
+ assert.Equal(d2, d.Params["date"])
+
+ d = newTestFd()
+ d.Frontmatter["date"] = d2
+ assert.NoError(handler.HandleDates(d))
+ assert.Equal(d2, d.Dates.FDate)
+ assert.Equal(d2, d.Params["date"])
+
+ }
+}
+
+func TestFrontMatterDatesCustomConfig(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ cfg := viper.New()
+ cfg.Set("frontmatter", map[string]interface{}{
+ "date": []string{"mydate"},
+ "lastmod": []string{"publishdate"},
+ "publishdate": []string{"publishdate"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ assert.NoError(err)
+
+ testDate, err := time.Parse("2006-01-02", "2018-02-01")
+ assert.NoError(err)
+
+ d := newTestFd()
+ d.Frontmatter["mydate"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["date"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["lastmod"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["publishdate"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["expirydate"] = testDate
+
+ assert.NoError(handler.HandleDates(d))
+
+ assert.Equal(1, d.Dates.FDate.Day())
+ assert.Equal(4, d.Dates.FLastmod.Day())
+ assert.Equal(4, d.Dates.FPublishDate.Day())
+ assert.Equal(5, d.Dates.FExpiryDate.Day())
+
+ assert.Equal(d.Dates.FDate, d.Params["date"])
+ assert.Equal(d.Dates.FDate, d.Params["mydate"])
+ assert.Equal(d.Dates.FPublishDate, d.Params["publishdate"])
+ assert.Equal(d.Dates.FExpiryDate, d.Params["expirydate"])
+
+ assert.False(handler.IsDateKey("date")) // This looks odd, but is configured like this.
+ assert.True(handler.IsDateKey("mydate"))
+ assert.True(handler.IsDateKey("publishdate"))
+ assert.True(handler.IsDateKey("pubdate"))
+
+}
+
+func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ cfg := viper.New()
+
+ cfg.Set("frontmatter", map[string]interface{}{
+ "date": []string{"mydate", ":default"},
+ "publishdate": []string{":default", "mypubdate"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ assert.NoError(err)
+
+ testDate, _ := time.Parse("2006-01-02", "2018-02-01")
+ d := newTestFd()
+ d.Frontmatter["mydate"] = testDate
+ d.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour)
+ d.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour)
+ d.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour)
+
+ assert.NoError(handler.HandleDates(d))
+
+ assert.Equal(1, d.Dates.FDate.Day())
+ assert.Equal(2, d.Dates.FLastmod.Day())
+ assert.Equal(4, d.Dates.FPublishDate.Day())
+ assert.True(d.Dates.FExpiryDate.IsZero())
+
+}
+
+func TestExpandDefaultValues(t *testing.T) {
+ assert := require.New(t)
+ assert.Equal([]string{"a", "b", "c", "d"}, expandDefaultValues([]string{"a", ":default", "d"}, []string{"b", "c"}))
+ assert.Equal([]string{"a", "b", "c"}, expandDefaultValues([]string{"a", "b", "c"}, []string{"a", "b", "c"}))
+ assert.Equal([]string{"b", "c", "a", "b", "c", "d"}, expandDefaultValues([]string{":default", "a", ":default", "d"}, []string{"b", "c"}))
+
+}
+
+func TestFrontMatterDateFieldHandler(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ handlers := new(frontmatterFieldHandlers)
+
+ fd := newTestFd()
+ d, _ := time.Parse("2006-01-02", "2018-02-01")
+ fd.Frontmatter["date"] = d
+ h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.FDate = t })
+
+ handled, err := h(fd)
+ assert.True(handled)
+ assert.NoError(err)
+ assert.Equal(d, fd.Dates.FDate)
+}
diff --git a/resources/page/pagemeta/pagemeta.go b/resources/page/pagemeta/pagemeta.go
new file mode 100644
index 000000000..07e5c5673
--- /dev/null
+++ b/resources/page/pagemeta/pagemeta.go
@@ -0,0 +1,21 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+type URLPath struct {
+ URL string
+ Permalink string
+ Slug string
+ Section string
+}
diff --git a/resources/page/pages.go b/resources/page/pages.go
new file mode 100644
index 000000000..1f79932a9
--- /dev/null
+++ b/resources/page/pages.go
@@ -0,0 +1,115 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "math/rand"
+
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ _ resource.ResourcesConverter = Pages{}
+)
+
+// Pages is a slice of pages. This is the most common list type in Hugo.
+type Pages []Page
+
+func (ps Pages) String() string {
+ return fmt.Sprintf("Pages(%d)", len(ps))
+}
+
+// Used in tests.
+func (ps Pages) shuffle() {
+ for i := range ps {
+ j := rand.Intn(i + 1)
+ ps[i], ps[j] = ps[j], ps[i]
+ }
+}
+
+// ToResources wraps resource.ResourcesConverter
+func (pages Pages) ToResources() resource.Resources {
+ r := make(resource.Resources, len(pages))
+ for i, p := range pages {
+ r[i] = p
+ }
+ return r
+}
+
+// ToPages tries to convert seq into Pages.
+func ToPages(seq interface{}) (Pages, error) {
+ if seq == nil {
+ return Pages{}, nil
+ }
+
+ switch v := seq.(type) {
+ case Pages:
+ return v, nil
+ case *Pages:
+ return *(v), nil
+ case WeightedPages:
+ return v.Pages(), nil
+ case PageGroup:
+ return v.Pages, nil
+ case []interface{}:
+ pages := make(Pages, len(v))
+ success := true
+ for i, vv := range v {
+ p, ok := vv.(Page)
+ if !ok {
+ success = false
+ break
+ }
+ pages[i] = p
+ }
+ if success {
+ return pages, nil
+ }
+ }
+
+ return nil, fmt.Errorf("cannot convert type %T to Pages", seq)
+}
+
+func (p Pages) Group(key interface{}, in interface{}) (interface{}, error) {
+ pages, err := ToPages(in)
+ if err != nil {
+ return nil, err
+ }
+ return PageGroup{Key: key, Pages: pages}, nil
+}
+
+// Len returns the number of pages in the list.
+func (p Pages) Len() int {
+ return len(p)
+}
+
+func (ps Pages) removeFirstIfFound(p Page) Pages {
+ ii := -1
+ for i, pp := range ps {
+ if p.Eq(pp) {
+ ii = i
+ break
+ }
+ }
+
+ if ii != -1 {
+ ps = append(ps[:ii], ps[ii+1:]...)
+ }
+ return ps
+}
+
+// PagesFactory somehow creates some Pages.
+// We do a lot of lazy Pages initialization in Hugo, so we need a type.
+type PagesFactory func() Pages
diff --git a/resources/page/pages_cache.go b/resources/page/pages_cache.go
new file mode 100644
index 000000000..e82d9a8cf
--- /dev/null
+++ b/resources/page/pages_cache.go
@@ -0,0 +1,136 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "sync"
+)
+
+type pageCacheEntry struct {
+ in []Pages
+ out Pages
+}
+
+func (entry pageCacheEntry) matches(pageLists []Pages) bool {
+ if len(entry.in) != len(pageLists) {
+ return false
+ }
+ for i, p := range pageLists {
+ if !pagesEqual(p, entry.in[i]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+type pageCache struct {
+ sync.RWMutex
+ m map[string][]pageCacheEntry
+}
+
+func newPageCache() *pageCache {
+ return &pageCache{m: make(map[string][]pageCacheEntry)}
+}
+
+func (c *pageCache) clear() {
+ c.Lock()
+ defer c.Unlock()
+ c.m = make(map[string][]pageCacheEntry)
+}
+
+// get/getP gets a Pages slice from the cache matching the given key and
+// all the provided Pages slices.
+// If none found in cache, a copy of the first slice is created.
+//
+// If an apply func is provided, that func is applied to the newly created copy.
+//
+// The getP variant's apply func takes a pointer to Pages.
+//
+// The cache and the execution of the apply func is protected by a RWMutex.
+func (c *pageCache) get(key string, apply func(p Pages), pageLists ...Pages) (Pages, bool) {
+ return c.getP(key, func(p *Pages) {
+ if apply != nil {
+ apply(*p)
+ }
+ }, pageLists...)
+}
+
+func (c *pageCache) getP(key string, apply func(p *Pages), pageLists ...Pages) (Pages, bool) {
+ c.RLock()
+ if cached, ok := c.m[key]; ok {
+ for _, entry := range cached {
+ if entry.matches(pageLists) {
+ c.RUnlock()
+ return entry.out, true
+ }
+ }
+ }
+ c.RUnlock()
+
+ c.Lock()
+ defer c.Unlock()
+
+ // double-check
+ if cached, ok := c.m[key]; ok {
+ for _, entry := range cached {
+ if entry.matches(pageLists) {
+ return entry.out, true
+ }
+ }
+ }
+
+ p := pageLists[0]
+ pagesCopy := append(Pages(nil), p...)
+
+ if apply != nil {
+ apply(&pagesCopy)
+ }
+
+ entry := pageCacheEntry{in: pageLists, out: pagesCopy}
+ if v, ok := c.m[key]; ok {
+ c.m[key] = append(v, entry)
+ } else {
+ c.m[key] = []pageCacheEntry{entry}
+ }
+
+ return pagesCopy, false
+
+}
+
+// pagesEqual returns whether p1 and p2 are equal.
+func pagesEqual(p1, p2 Pages) bool {
+ if p1 == nil && p2 == nil {
+ return true
+ }
+
+ if p1 == nil || p2 == nil {
+ return false
+ }
+
+ if p1.Len() != p2.Len() {
+ return false
+ }
+
+ if p1.Len() == 0 {
+ return true
+ }
+
+ for i := 0; i < len(p1); i++ {
+ if p1[i] != p2[i] {
+ return false
+ }
+ }
+ return true
+}
diff --git a/resources/page/pages_cache_test.go b/resources/page/pages_cache_test.go
new file mode 100644
index 000000000..b83283408
--- /dev/null
+++ b/resources/page/pages_cache_test.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "strconv"
+ "sync"
+ "sync/atomic"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPageCache(t *testing.T) {
+ t.Parallel()
+ c1 := newPageCache()
+
+ changeFirst := func(p Pages) {
+ p[0].(*testPage).description = "changed"
+ }
+
+ var o1 uint64
+ var o2 uint64
+
+ var wg sync.WaitGroup
+
+ var l1 sync.Mutex
+ var l2 sync.Mutex
+
+ var testPageSets []Pages
+
+ for i := 0; i < 50; i++ {
+ testPageSets = append(testPageSets, createSortTestPages(i+1))
+ }
+
+ for j := 0; j < 100; j++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for k, pages := range testPageSets {
+ l1.Lock()
+ p, c := c1.get("k1", nil, pages)
+ assert.Equal(t, !atomic.CompareAndSwapUint64(&o1, uint64(k), uint64(k+1)), c)
+ l1.Unlock()
+ p2, c2 := c1.get("k1", nil, p)
+ assert.True(t, c2)
+ assert.True(t, pagesEqual(p, p2))
+ assert.True(t, pagesEqual(p, pages))
+ assert.NotNil(t, p)
+
+ l2.Lock()
+ p3, c3 := c1.get("k2", changeFirst, pages)
+ assert.Equal(t, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)), c3)
+ l2.Unlock()
+ assert.NotNil(t, p3)
+ assert.Equal(t, p3[0].(*testPage).description, "changed")
+ }
+ }()
+ }
+ wg.Wait()
+}
+
+func BenchmarkPageCache(b *testing.B) {
+ cache := newPageCache()
+ pages := make(Pages, 30)
+ for i := 0; i < 30; i++ {
+ pages[i] = &testPage{title: "p" + strconv.Itoa(i)}
+ }
+ key := "key"
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ cache.getP(key, nil, pages)
+ }
+}
diff --git a/resources/page/pages_language_merge.go b/resources/page/pages_language_merge.go
new file mode 100644
index 000000000..11393a754
--- /dev/null
+++ b/resources/page/pages_language_merge.go
@@ -0,0 +1,64 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+)
+
+var (
+ _ pagesLanguageMerger = (*Pages)(nil)
+)
+
+type pagesLanguageMerger interface {
+ MergeByLanguage(other Pages) Pages
+ // Needed for integration with the tpl package.
+ MergeByLanguageInterface(other interface{}) (interface{}, error)
+}
+
+// MergeByLanguage supplies missing translations in p1 with values from p2.
+// The result is sorted by the default sort order for pages.
+func (p1 Pages) MergeByLanguage(p2 Pages) Pages {
+ merge := func(pages *Pages) {
+ m := make(map[string]bool)
+ for _, p := range *pages {
+ m[p.TranslationKey()] = true
+ }
+
+ for _, p := range p2 {
+ if _, found := m[p.TranslationKey()]; !found {
+ *pages = append(*pages, p)
+ }
+ }
+
+ SortByDefault(*pages)
+ }
+
+ out, _ := spc.getP("pages.MergeByLanguage", merge, p1, p2)
+
+ return out
+}
+
+// MergeByLanguageInterface is the generic version of MergeByLanguage. It
+// is here just so it can be called from the tpl package.
+func (p1 Pages) MergeByLanguageInterface(in interface{}) (interface{}, error) {
+ if in == nil {
+ return p1, nil
+ }
+ p2, ok := in.(Pages)
+ if !ok {
+ return nil, fmt.Errorf("%T cannot be merged by language", in)
+ }
+ return p1.MergeByLanguage(p2), nil
+}
diff --git a/resources/page/pages_prev_next.go b/resources/page/pages_prev_next.go
new file mode 100644
index 000000000..9293c9874
--- /dev/null
+++ b/resources/page/pages_prev_next.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+// Prev returns the previous page relative to the given Page.
+func (p Pages) Prev(cur Page) Page {
+ for x, c := range p {
+ if c.Eq(cur) {
+ if x == 0 {
+ // TODO(bep) consider returning nil here to get it in line with the other Prevs
+ return p[len(p)-1]
+ }
+ return p[x-1]
+ }
+ }
+ return nil
+}
+
+// Next returns the next page relative to the given Page.
+func (p Pages) Next(cur Page) Page {
+ for x, c := range p {
+ if c.Eq(cur) {
+ if x < len(p)-1 {
+ return p[x+1]
+ }
+ // TODO(bep) consider returning nil here to get it in line with the other Nexts
+ return p[0]
+ }
+ }
+ return nil
+}
diff --git a/resources/page/pages_prev_next_test.go b/resources/page/pages_prev_next_test.go
new file mode 100644
index 000000000..c39ad0603
--- /dev/null
+++ b/resources/page/pages_prev_next_test.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+
+ "github.com/spf13/cast"
+ "github.com/stretchr/testify/assert"
+)
+
+type pagePNTestObject struct {
+ path string
+ weight int
+ date string
+}
+
+var pagePNTestSources = []pagePNTestObject{
+ {"/section1/testpage1.md", 5, "2012-04-06"},
+ {"/section1/testpage2.md", 4, "2012-01-01"},
+ {"/section1/testpage3.md", 3, "2012-04-06"},
+ {"/section2/testpage4.md", 2, "2012-03-02"},
+ {"/section2/testpage5.md", 1, "2012-04-06"},
+}
+
+func TestPrev(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ assert.Equal(t, pages.Prev(pages[0]), pages[4])
+ assert.Equal(t, pages.Prev(pages[1]), pages[0])
+ assert.Equal(t, pages.Prev(pages[4]), pages[3])
+}
+
+func TestNext(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ assert.Equal(t, pages.Next(pages[0]), pages[1])
+ assert.Equal(t, pages.Next(pages[1]), pages[2])
+ assert.Equal(t, pages.Next(pages[4]), pages[0])
+}
+
+func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages {
+ w := WeightedPages{}
+
+ for _, src := range pagePNTestSources {
+ p := newTestPage()
+ p.path = src.path
+ p.weight = src.weight
+ p.date = cast.ToTime(src.date)
+ p.pubDate = cast.ToTime(src.date)
+ w = append(w, WeightedPage{Weight: p.weight, Page: p})
+ }
+
+ w.Sort()
+ return w
+}
+
+func TestWeightedPagesPrev(t *testing.T) {
+ t.Parallel()
+ w := prepareWeightedPagesPrevNext(t)
+ assert.Equal(t, w.Prev(w[0].Page), w[4].Page)
+ assert.Equal(t, w.Prev(w[1].Page), w[0].Page)
+ assert.Equal(t, w.Prev(w[4].Page), w[3].Page)
+}
+
+func TestWeightedPagesNext(t *testing.T) {
+ t.Parallel()
+ w := prepareWeightedPagesPrevNext(t)
+ assert.Equal(t, w.Next(w[0].Page), w[1].Page)
+ assert.Equal(t, w.Next(w[1].Page), w[2].Page)
+ assert.Equal(t, w.Next(w[4].Page), w[0].Page)
+}
diff --git a/resources/page/pages_related.go b/resources/page/pages_related.go
new file mode 100644
index 000000000..1a4386135
--- /dev/null
+++ b/resources/page/pages_related.go
@@ -0,0 +1,199 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "sync"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/related"
+ "github.com/pkg/errors"
+ "github.com/spf13/cast"
+)
+
+var (
+ // Assert that Pages and PageGroup implements the PageGenealogist interface.
+ _ PageGenealogist = (Pages)(nil)
+ _ PageGenealogist = PageGroup{}
+)
+
+// A PageGenealogist finds related pages in a page collection. This interface is implemented
+// by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc.
+type PageGenealogist interface {
+
+ // Template example:
+ // {{ $related := .RegularPages.Related . }}
+ Related(doc related.Document) (Pages, error)
+
+ // Template example:
+ // {{ $related := .RegularPages.RelatedIndices . "tags" "date" }}
+ RelatedIndices(doc related.Document, indices ...interface{}) (Pages, error)
+
+ // Template example:
+ // {{ $related := .RegularPages.RelatedTo ( keyVals "tags" "hugo", "rocks") ( keyVals "date" .Date ) }}
+ RelatedTo(args ...types.KeyValues) (Pages, error)
+}
+
+// Related searches all the configured indices with the search keywords from the
+// supplied document.
+func (p Pages) Related(doc related.Document) (Pages, error) {
+ result, err := p.searchDoc(doc)
+ if err != nil {
+ return nil, err
+ }
+
+ if page, ok := doc.(Page); ok {
+ return result.removeFirstIfFound(page), nil
+ }
+
+ return result, nil
+
+}
+
+// RelatedIndices searches the given indices with the search keywords from the
+// supplied document.
+func (p Pages) RelatedIndices(doc related.Document, indices ...interface{}) (Pages, error) {
+ indicesStr, err := cast.ToStringSliceE(indices)
+ if err != nil {
+ return nil, err
+ }
+
+ result, err := p.searchDoc(doc, indicesStr...)
+ if err != nil {
+ return nil, err
+ }
+
+ if page, ok := doc.(Page); ok {
+ return result.removeFirstIfFound(page), nil
+ }
+
+ return result, nil
+
+}
+
+// RelatedTo searches the given indices with the corresponding values.
+func (p Pages) RelatedTo(args ...types.KeyValues) (Pages, error) {
+ if len(p) == 0 {
+ return nil, nil
+ }
+
+ return p.search(args...)
+
+}
+
+func (p Pages) search(args ...types.KeyValues) (Pages, error) {
+ return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
+ return idx.SearchKeyValues(args...)
+ })
+
+}
+
+func (p Pages) searchDoc(doc related.Document, indices ...string) (Pages, error) {
+ return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
+ return idx.SearchDoc(doc, indices...)
+ })
+}
+
+func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]related.Document, error)) (Pages, error) {
+ if len(p) == 0 {
+ return nil, nil
+ }
+
+ d, ok := p[0].(InternalDependencies)
+ if !ok {
+ return nil, errors.Errorf("invalid type %T in related serch", p[0])
+ }
+
+ cache := d.GetRelatedDocsHandler()
+
+ searchIndex, err := cache.getOrCreateIndex(p)
+ if err != nil {
+ return nil, err
+ }
+
+ result, err := search(searchIndex)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(result) > 0 {
+ mp := make(Pages, len(result))
+ for i, match := range result {
+ mp[i] = match.(Page)
+ }
+ return mp, nil
+ }
+
+ return nil, nil
+}
+
+type cachedPostingList struct {
+ p Pages
+
+ postingList *related.InvertedIndex
+}
+
+type RelatedDocsHandler struct {
+ cfg related.Config
+
+ postingLists []*cachedPostingList
+ mu sync.RWMutex
+}
+
+func NewRelatedDocsHandler(cfg related.Config) *RelatedDocsHandler {
+ return &RelatedDocsHandler{cfg: cfg}
+}
+
+func (s *RelatedDocsHandler) Clone() *RelatedDocsHandler {
+ return NewRelatedDocsHandler(s.cfg)
+}
+
+// This assumes that a lock has been acquired.
+func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
+ for _, ci := range s.postingLists {
+ if pagesEqual(p, ci.p) {
+ return ci.postingList
+ }
+ }
+ return nil
+}
+
+func (s *RelatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex, error) {
+ s.mu.RLock()
+ cachedIndex := s.getIndex(p)
+ if cachedIndex != nil {
+ s.mu.RUnlock()
+ return cachedIndex, nil
+ }
+ s.mu.RUnlock()
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ if cachedIndex := s.getIndex(p); cachedIndex != nil {
+ return cachedIndex, nil
+ }
+
+ searchIndex := related.NewInvertedIndex(s.cfg)
+
+ for _, page := range p {
+ if err := searchIndex.Add(page); err != nil {
+ return nil, err
+ }
+ }
+
+ s.postingLists = append(s.postingLists, &cachedPostingList{p: p, postingList: searchIndex})
+
+ return searchIndex, nil
+}
diff --git a/resources/page/pages_related_test.go b/resources/page/pages_related_test.go
new file mode 100644
index 000000000..016b492c8
--- /dev/null
+++ b/resources/page/pages_related_test.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestRelated(t *testing.T) {
+ assert := require.New(t)
+
+ t.Parallel()
+
+ pages := Pages{
+ &testPage{
+ title: "Page 1",
+ pubDate: mustParseDate("2017-01-03"),
+ params: map[string]interface{}{
+ "keywords": []string{"hugo", "says"},
+ },
+ },
+ &testPage{
+ title: "Page 2",
+ pubDate: mustParseDate("2017-01-02"),
+ params: map[string]interface{}{
+ "keywords": []string{"hugo", "rocks"},
+ },
+ },
+ &testPage{
+ title: "Page 3",
+ pubDate: mustParseDate("2017-01-01"),
+ params: map[string]interface{}{
+ "keywords": []string{"bep", "says"},
+ },
+ },
+ }
+
+ result, err := pages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
+
+ assert.NoError(err)
+ assert.Len(result, 2)
+ assert.Equal("Page 2", result[0].Title())
+ assert.Equal("Page 1", result[1].Title())
+
+ result, err = pages.Related(pages[0])
+ assert.NoError(err)
+ assert.Len(result, 2)
+ assert.Equal("Page 2", result[0].Title())
+ assert.Equal("Page 3", result[1].Title())
+
+ result, err = pages.RelatedIndices(pages[0], "keywords")
+ assert.NoError(err)
+ assert.Len(result, 2)
+ assert.Equal("Page 2", result[0].Title())
+ assert.Equal("Page 3", result[1].Title())
+
+ result, err = pages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
+ assert.NoError(err)
+ assert.Len(result, 2)
+ assert.Equal("Page 2", result[0].Title())
+ assert.Equal("Page 3", result[1].Title())
+}
+
+func mustParseDate(s string) time.Time {
+ d, err := time.Parse("2006-01-02", s)
+ if err != nil {
+ panic(err)
+ }
+ return d
+}
diff --git a/resources/page/pages_sort.go b/resources/page/pages_sort.go
new file mode 100644
index 000000000..eb3a28247
--- /dev/null
+++ b/resources/page/pages_sort.go
@@ -0,0 +1,348 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "sort"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/spf13/cast"
+)
+
+var spc = newPageCache()
+
+/*
+ * Implementation of a custom sorter for Pages
+ */
+
+// A pageSorter implements the sort interface for Pages
+type pageSorter struct {
+ pages Pages
+ by pageBy
+}
+
+// pageBy is a closure used in the Sort.Less method.
+type pageBy func(p1, p2 Page) bool
+
+// Sort stable sorts the pages given the receiver's sort order.
+func (by pageBy) Sort(pages Pages) {
+ ps := &pageSorter{
+ pages: pages,
+ by: by, // The Sort method's receiver is the function (closure) that defines the sort order.
+ }
+ sort.Stable(ps)
+}
+
+// DefaultPageSort is the default sort func for pages in Hugo:
+// Order by Weight, Date, LinkTitle and then full file path.
+var DefaultPageSort = func(p1, p2 Page) bool {
+ if p1.Weight() == p2.Weight() {
+ if p1.Date().Unix() == p2.Date().Unix() {
+ if p1.LinkTitle() == p2.LinkTitle() {
+ if p1.File() == nil || p2.File() == nil {
+ return p1.File() == nil
+ }
+ return p1.File().Filename() < p2.File().Filename()
+ }
+ return (p1.LinkTitle() < p2.LinkTitle())
+ }
+ return p1.Date().Unix() > p2.Date().Unix()
+ }
+
+ if p2.Weight() == 0 {
+ return true
+ }
+
+ if p1.Weight() == 0 {
+ return false
+ }
+
+ return p1.Weight() < p2.Weight()
+}
+
+var languagePageSort = func(p1, p2 Page) bool {
+
+ if p1.Language().Weight == p2.Language().Weight {
+ if p1.Date().Unix() == p2.Date().Unix() {
+ if p1.LinkTitle() == p2.LinkTitle() {
+ if p1.File() != nil && p2.File() != nil {
+ return p1.File().Filename() < p2.File().Filename()
+ }
+ }
+ return (p1.LinkTitle() < p2.LinkTitle())
+ }
+ return p1.Date().Unix() > p2.Date().Unix()
+ }
+
+ if p2.Language().Weight == 0 {
+ return true
+ }
+
+ if p1.Language().Weight == 0 {
+ return false
+ }
+
+ return p1.Language().Weight < p2.Language().Weight
+}
+
+func (ps *pageSorter) Len() int { return len(ps.pages) }
+func (ps *pageSorter) Swap(i, j int) { ps.pages[i], ps.pages[j] = ps.pages[j], ps.pages[i] }
+
+// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
+func (ps *pageSorter) Less(i, j int) bool { return ps.by(ps.pages[i], ps.pages[j]) }
+
+// Limit limits the number of pages returned to n.
+func (p Pages) Limit(n int) Pages {
+ if len(p) > n {
+ return p[0:n]
+ }
+ return p
+}
+
+// ByWeight sorts the Pages by weight and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByWeight() Pages {
+ const key = "pageSort.ByWeight"
+ pages, _ := spc.get(key, pageBy(DefaultPageSort).Sort, p)
+ return pages
+}
+
+// SortByDefault sorts pages by the default sort.
+func SortByDefault(pages Pages) {
+ pageBy(DefaultPageSort).Sort(pages)
+}
+
+// ByTitle sorts the Pages by title and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByTitle() Pages {
+
+ const key = "pageSort.ByTitle"
+
+ title := func(p1, p2 Page) bool {
+ return p1.Title() < p2.Title()
+ }
+
+ pages, _ := spc.get(key, pageBy(title).Sort, p)
+ return pages
+}
+
+// ByLinkTitle sorts the Pages by link title and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLinkTitle() Pages {
+
+ const key = "pageSort.ByLinkTitle"
+
+ linkTitle := func(p1, p2 Page) bool {
+ return p1.LinkTitle() < p2.LinkTitle()
+ }
+
+ pages, _ := spc.get(key, pageBy(linkTitle).Sort, p)
+
+ return pages
+}
+
+// ByDate sorts the Pages by date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByDate() Pages {
+
+ const key = "pageSort.ByDate"
+
+ date := func(p1, p2 Page) bool {
+ return p1.Date().Unix() < p2.Date().Unix()
+ }
+
+ pages, _ := spc.get(key, pageBy(date).Sort, p)
+
+ return pages
+}
+
+// ByPublishDate sorts the Pages by publish date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByPublishDate() Pages {
+
+ const key = "pageSort.ByPublishDate"
+
+ pubDate := func(p1, p2 Page) bool {
+ return p1.PublishDate().Unix() < p2.PublishDate().Unix()
+ }
+
+ pages, _ := spc.get(key, pageBy(pubDate).Sort, p)
+
+ return pages
+}
+
+// ByExpiryDate sorts the Pages by publish date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByExpiryDate() Pages {
+
+ const key = "pageSort.ByExpiryDate"
+
+ expDate := func(p1, p2 Page) bool {
+ return p1.ExpiryDate().Unix() < p2.ExpiryDate().Unix()
+ }
+
+ pages, _ := spc.get(key, pageBy(expDate).Sort, p)
+
+ return pages
+}
+
+// ByLastmod sorts the Pages by the last modification date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLastmod() Pages {
+
+ const key = "pageSort.ByLastmod"
+
+ date := func(p1, p2 Page) bool {
+ return p1.Lastmod().Unix() < p2.Lastmod().Unix()
+ }
+
+ pages, _ := spc.get(key, pageBy(date).Sort, p)
+
+ return pages
+}
+
+// ByLength sorts the Pages by length and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLength() Pages {
+
+ const key = "pageSort.ByLength"
+
+ length := func(p1, p2 Page) bool {
+
+ p1l, ok1 := p1.(resource.LengthProvider)
+ p2l, ok2 := p2.(resource.LengthProvider)
+
+ if !ok1 {
+ return true
+ }
+
+ if !ok2 {
+ return false
+ }
+
+ return p1l.Len() < p2l.Len()
+ }
+
+ pages, _ := spc.get(key, pageBy(length).Sort, p)
+
+ return pages
+}
+
+// ByLanguage sorts the Pages by the language's Weight.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLanguage() Pages {
+
+ const key = "pageSort.ByLanguage"
+
+ pages, _ := spc.get(key, pageBy(languagePageSort).Sort, p)
+
+ return pages
+}
+
+// SortByLanguage sorts the pages by language.
+func SortByLanguage(pages Pages) {
+ pageBy(languagePageSort).Sort(pages)
+}
+
+// Reverse reverses the order in Pages and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) Reverse() Pages {
+ const key = "pageSort.Reverse"
+
+ reverseFunc := func(pages Pages) {
+ for i, j := 0, len(pages)-1; i < j; i, j = i+1, j-1 {
+ pages[i], pages[j] = pages[j], pages[i]
+ }
+ }
+
+ pages, _ := spc.get(key, reverseFunc, p)
+
+ return pages
+}
+
+// ByParam sorts the pages according to the given page Params key.
+//
+// Adjacent invocations on the same receiver with the same paramsKey will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByParam(paramsKey interface{}) Pages {
+ paramsKeyStr := cast.ToString(paramsKey)
+ key := "pageSort.ByParam." + paramsKeyStr
+
+ paramsKeyComparator := func(p1, p2 Page) bool {
+ v1, _ := p1.Param(paramsKeyStr)
+ v2, _ := p2.Param(paramsKeyStr)
+
+ if v1 == nil {
+ return false
+ }
+
+ if v2 == nil {
+ return true
+ }
+
+ isNumeric := func(v interface{}) bool {
+ switch v.(type) {
+ case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64:
+ return true
+ default:
+ return false
+ }
+ }
+
+ if isNumeric(v1) && isNumeric(v2) {
+ return cast.ToFloat64(v1) < cast.ToFloat64(v2)
+ }
+
+ s1 := cast.ToString(v1)
+ s2 := cast.ToString(v2)
+
+ return s1 < s2
+ }
+
+ pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p)
+
+ return pages
+}
diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go
new file mode 100644
index 000000000..c781de2f3
--- /dev/null
+++ b/resources/page/pages_sort_test.go
@@ -0,0 +1,279 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDefaultSort(t *testing.T) {
+ t.Parallel()
+ d1 := time.Now()
+ d2 := d1.Add(-1 * time.Hour)
+ d3 := d1.Add(-2 * time.Hour)
+ d4 := d1.Add(-3 * time.Hour)
+
+ p := createSortTestPages(4)
+
+ // first by weight
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p)
+ SortByDefault(p)
+
+ assert.Equal(t, 1, p[0].Weight())
+
+ // Consider zero weight, issue #2673
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p)
+ SortByDefault(p)
+
+ assert.Equal(t, 1, p[0].Weight())
+
+ // next by date
+ setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p)
+ SortByDefault(p)
+ assert.Equal(t, d1, p[0].Date())
+
+ // finally by link title
+ setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p)
+ SortByDefault(p)
+ assert.Equal(t, "al", p[0].LinkTitle())
+ assert.Equal(t, "bl", p[1].LinkTitle())
+ assert.Equal(t, "cl", p[2].LinkTitle())
+}
+
+// https://github.com/gohugoio/hugo/issues/4953
+func TestSortByLinkTitle(t *testing.T) {
+ t.Parallel()
+ assert := require.New(t)
+ pages := createSortTestPages(6)
+
+ for i, p := range pages {
+ pp := p.(*testPage)
+ if i < 5 {
+ pp.title = fmt.Sprintf("title%d", i)
+ }
+
+ if i > 2 {
+ pp.linkTitle = fmt.Sprintf("linkTitle%d", i)
+ }
+
+ }
+
+ pages.shuffle()
+
+ bylt := pages.ByLinkTitle()
+
+ for i, p := range bylt {
+ msg := fmt.Sprintf("test: %d", i)
+ if i < 3 {
+ assert.Equal(fmt.Sprintf("linkTitle%d", i+3), p.LinkTitle(), msg)
+ } else {
+ assert.Equal(fmt.Sprintf("title%d", i-3), p.LinkTitle(), msg)
+ }
+ }
+}
+
+func TestSortByN(t *testing.T) {
+ t.Parallel()
+ d1 := time.Now()
+ d2 := d1.Add(-2 * time.Hour)
+ d3 := d1.Add(-10 * time.Hour)
+ d4 := d1.Add(-20 * time.Hour)
+
+ p := createSortTestPages(4)
+
+ for i, this := range []struct {
+ sortFunc func(p Pages) Pages
+ assertFunc func(p Pages) bool
+ }{
+ {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }},
+ {(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }},
+ {(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }},
+ {(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }},
+ {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }},
+ {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }},
+ {(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }},
+ {(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len(p[0].(*testPage).content) }},
+ } {
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p)
+
+ sorted := this.sortFunc(p)
+ if !this.assertFunc(sorted) {
+ t.Errorf("[%d] sort error", i)
+ }
+ }
+
+}
+
+func TestLimit(t *testing.T) {
+ t.Parallel()
+ p := createSortTestPages(10)
+ firstFive := p.Limit(5)
+ assert.Equal(t, 5, len(firstFive))
+ for i := 0; i < 5; i++ {
+ assert.Equal(t, p[i], firstFive[i])
+ }
+ assert.Equal(t, p, p.Limit(10))
+ assert.Equal(t, p, p.Limit(11))
+}
+
+func TestPageSortReverse(t *testing.T) {
+ t.Parallel()
+ p1 := createSortTestPages(10)
+ assert.Equal(t, 0, p1[0].(*testPage).fuzzyWordCount)
+ assert.Equal(t, 9, p1[9].(*testPage).fuzzyWordCount)
+ p2 := p1.Reverse()
+ assert.Equal(t, 9, p2[0].(*testPage).fuzzyWordCount)
+ assert.Equal(t, 0, p2[9].(*testPage).fuzzyWordCount)
+ // cached
+ assert.True(t, pagesEqual(p2, p1.Reverse()))
+}
+
+func TestPageSortByParam(t *testing.T) {
+ t.Parallel()
+ var k interface{} = "arbitrarily.nested"
+
+ unsorted := createSortTestPages(10)
+ delete(unsorted[9].Params(), "arbitrarily")
+
+ firstSetValue, _ := unsorted[0].Param(k)
+ secondSetValue, _ := unsorted[1].Param(k)
+ lastSetValue, _ := unsorted[8].Param(k)
+ unsetValue, _ := unsorted[9].Param(k)
+
+ assert.Equal(t, "xyz100", firstSetValue)
+ assert.Equal(t, "xyz99", secondSetValue)
+ assert.Equal(t, "xyz92", lastSetValue)
+ assert.Equal(t, nil, unsetValue)
+
+ sorted := unsorted.ByParam("arbitrarily.nested")
+ firstSetSortedValue, _ := sorted[0].Param(k)
+ secondSetSortedValue, _ := sorted[1].Param(k)
+ lastSetSortedValue, _ := sorted[8].Param(k)
+ unsetSortedValue, _ := sorted[9].Param(k)
+
+ assert.Equal(t, firstSetValue, firstSetSortedValue)
+ assert.Equal(t, secondSetValue, lastSetSortedValue)
+ assert.Equal(t, lastSetValue, secondSetSortedValue)
+ assert.Equal(t, unsetValue, unsetSortedValue)
+}
+
+func TestPageSortByParamNumeric(t *testing.T) {
+ t.Parallel()
+ var k interface{} = "arbitrarily.nested"
+
+ n := 10
+ unsorted := createSortTestPages(n)
+ for i := 0; i < n; i++ {
+ v := 100 - i
+ if i%2 == 0 {
+ v = 100.0 - i
+ }
+
+ unsorted[i].(*testPage).params = map[string]interface{}{
+ "arbitrarily": map[string]interface{}{
+ "nested": v,
+ },
+ }
+ }
+ delete(unsorted[9].Params(), "arbitrarily")
+
+ firstSetValue, _ := unsorted[0].Param(k)
+ secondSetValue, _ := unsorted[1].Param(k)
+ lastSetValue, _ := unsorted[8].Param(k)
+ unsetValue, _ := unsorted[9].Param(k)
+
+ assert.Equal(t, 100, firstSetValue)
+ assert.Equal(t, 99, secondSetValue)
+ assert.Equal(t, 92, lastSetValue)
+ assert.Equal(t, nil, unsetValue)
+
+ sorted := unsorted.ByParam("arbitrarily.nested")
+ firstSetSortedValue, _ := sorted[0].Param(k)
+ secondSetSortedValue, _ := sorted[1].Param(k)
+ lastSetSortedValue, _ := sorted[8].Param(k)
+ unsetSortedValue, _ := sorted[9].Param(k)
+
+ assert.Equal(t, 92, firstSetSortedValue)
+ assert.Equal(t, 93, secondSetSortedValue)
+ assert.Equal(t, 100, lastSetSortedValue)
+ assert.Equal(t, unsetValue, unsetSortedValue)
+}
+
+func BenchmarkSortByWeightAndReverse(b *testing.B) {
+ p := createSortTestPages(300)
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ p = p.ByWeight().Reverse()
+ }
+}
+
+func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) {
+ for i := range dates {
+ this := pages[i].(*testPage)
+ other := pages[len(dates)-1-i].(*testPage)
+
+ this.date = dates[i]
+ this.lastMod = dates[i]
+ this.weight = weights[i]
+ this.title = titles[i]
+ // make sure we compare apples and ... apples ...
+ other.linkTitle = this.Title() + "l"
+ other.pubDate = dates[i]
+ other.expiryDate = dates[i]
+ other.content = titles[i] + "_content"
+ }
+ lastLastMod := pages[2].Lastmod()
+ pages[2].(*testPage).lastMod = pages[1].Lastmod()
+ pages[1].(*testPage).lastMod = lastLastMod
+
+ for _, p := range pages {
+ p.(*testPage).content = ""
+ }
+
+}
+
+func createSortTestPages(num int) Pages {
+ pages := make(Pages, num)
+
+ for i := 0; i < num; i++ {
+ p := newTestPage()
+ p.path = fmt.Sprintf("/x/y/p%d.md", i)
+ p.params = map[string]interface{}{
+ "arbitrarily": map[string]interface{}{
+ "nested": ("xyz" + fmt.Sprintf("%v", 100-i)),
+ },
+ }
+
+ w := 5
+
+ if i%2 == 0 {
+ w = 10
+ }
+ p.fuzzyWordCount = i
+ p.weight = w
+ p.description = "initial"
+
+ pages[i] = p
+ }
+
+ return pages
+}
diff --git a/resources/page/pagination.go b/resources/page/pagination.go
new file mode 100644
index 000000000..6d5da966e
--- /dev/null
+++ b/resources/page/pagination.go
@@ -0,0 +1,404 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "math"
+ "reflect"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/spf13/cast"
+)
+
+// PaginatorProvider provides two ways to create a page paginator.
+type PaginatorProvider interface {
+ Paginator(options ...interface{}) (*Pager, error)
+ Paginate(seq interface{}, options ...interface{}) (*Pager, error)
+}
+
+// Pager represents one of the elements in a paginator.
+// The number, starting on 1, represents its place.
+type Pager struct {
+ number int
+ *Paginator
+}
+
+func (p Pager) String() string {
+ return fmt.Sprintf("Pager %d", p.number)
+}
+
+type paginatedElement interface {
+ Len() int
+}
+
+type pagers []*Pager
+
+var (
+ paginatorEmptyPages Pages
+ paginatorEmptyPageGroups PagesGroup
+)
+
+type Paginator struct {
+ paginatedElements []paginatedElement
+ pagers
+ paginationURLFactory
+ total int
+ size int
+}
+
+type paginationURLFactory func(int) string
+
+// PageNumber returns the current page's number in the pager sequence.
+func (p *Pager) PageNumber() int {
+ return p.number
+}
+
+// URL returns the URL to the current page.
+func (p *Pager) URL() template.HTML {
+ return template.HTML(p.paginationURLFactory(p.PageNumber()))
+}
+
+// Pages returns the Pages on this page.
+// Note: If this return a non-empty result, then PageGroups() will return empty.
+func (p *Pager) Pages() Pages {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPages
+ }
+
+ if pages, ok := p.element().(Pages); ok {
+ return pages
+ }
+
+ return paginatorEmptyPages
+}
+
+// PageGroups return Page groups for this page.
+// Note: If this return non-empty result, then Pages() will return empty.
+func (p *Pager) PageGroups() PagesGroup {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPageGroups
+ }
+
+ if groups, ok := p.element().(PagesGroup); ok {
+ return groups
+ }
+
+ return paginatorEmptyPageGroups
+}
+
+func (p *Pager) element() paginatedElement {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPages
+ }
+ return p.paginatedElements[p.PageNumber()-1]
+}
+
+// page returns the Page with the given index
+func (p *Pager) page(index int) (Page, error) {
+
+ if pages, ok := p.element().(Pages); ok {
+ if pages != nil && len(pages) > index {
+ return pages[index], nil
+ }
+ return nil, nil
+ }
+
+ // must be PagesGroup
+ // this construction looks clumsy, but ...
+ // ... it is the difference between 99.5% and 100% test coverage :-)
+ groups := p.element().(PagesGroup)
+
+ i := 0
+ for _, v := range groups {
+ for _, page := range v.Pages {
+ if i == index {
+ return page, nil
+ }
+ i++
+ }
+ }
+ return nil, nil
+}
+
+// NumberOfElements gets the number of elements on this page.
+func (p *Pager) NumberOfElements() int {
+ return p.element().Len()
+}
+
+// HasPrev tests whether there are page(s) before the current.
+func (p *Pager) HasPrev() bool {
+ return p.PageNumber() > 1
+}
+
+// Prev returns the pager for the previous page.
+func (p *Pager) Prev() *Pager {
+ if !p.HasPrev() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()-2]
+}
+
+// HasNext tests whether there are page(s) after the current.
+func (p *Pager) HasNext() bool {
+ return p.PageNumber() < len(p.paginatedElements)
+}
+
+// Next returns the pager for the next page.
+func (p *Pager) Next() *Pager {
+ if !p.HasNext() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()]
+}
+
+// First returns the pager for the first page.
+func (p *Pager) First() *Pager {
+ return p.pagers[0]
+}
+
+// Last returns the pager for the last page.
+func (p *Pager) Last() *Pager {
+ return p.pagers[len(p.pagers)-1]
+}
+
+// Pagers returns a list of pagers that can be used to build a pagination menu.
+func (p *Paginator) Pagers() pagers {
+ return p.pagers
+}
+
+// PageSize returns the size of each paginator page.
+func (p *Paginator) PageSize() int {
+ return p.size
+}
+
+// TotalPages returns the number of pages in the paginator.
+func (p *Paginator) TotalPages() int {
+ return len(p.paginatedElements)
+}
+
+// TotalNumberOfElements returns the number of elements on all pages in this paginator.
+func (p *Paginator) TotalNumberOfElements() int {
+ return p.total
+}
+
+func splitPages(pages Pages, size int) []paginatedElement {
+ var split []paginatedElement
+ for low, j := 0, len(pages); low < j; low += size {
+ high := int(math.Min(float64(low+size), float64(len(pages))))
+ split = append(split, pages[low:high])
+ }
+
+ return split
+}
+
+func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
+
+ type keyPage struct {
+ key interface{}
+ page Page
+ }
+
+ var (
+ split []paginatedElement
+ flattened []keyPage
+ )
+
+ for _, g := range pageGroups {
+ for _, p := range g.Pages {
+ flattened = append(flattened, keyPage{g.Key, p})
+ }
+ }
+
+ numPages := len(flattened)
+
+ for low, j := 0, numPages; low < j; low += size {
+ high := int(math.Min(float64(low+size), float64(numPages)))
+
+ var (
+ pg PagesGroup
+ key interface{}
+ groupIndex = -1
+ )
+
+ for k := low; k < high; k++ {
+ kp := flattened[k]
+ if key == nil || key != kp.key {
+ key = kp.key
+ pg = append(pg, PageGroup{Key: key})
+ groupIndex++
+ }
+ pg[groupIndex].Pages = append(pg[groupIndex].Pages, kp.page)
+ }
+ split = append(split, pg)
+ }
+
+ return split
+}
+
+func ResolvePagerSize(cfg config.Provider, options ...interface{}) (int, error) {
+ if len(options) == 0 {
+ return cfg.GetInt("paginate"), nil
+ }
+
+ if len(options) > 1 {
+ return -1, errors.New("too many arguments, 'pager size' is currently the only option")
+ }
+
+ pas, err := cast.ToIntE(options[0])
+
+ if err != nil || pas <= 0 {
+ return -1, errors.New(("'pager size' must be a positive integer"))
+ }
+
+ return pas, nil
+}
+
+func Paginate(td TargetPathDescriptor, seq interface{}, pagerSize int) (*Paginator, error) {
+
+ if pagerSize <= 0 {
+ return nil, errors.New("'paginate' configuration setting must be positive to paginate")
+ }
+
+ urlFactory := newPaginationURLFactory(td)
+
+ var paginator *Paginator
+
+ groups, err := ToPagesGroup(seq)
+ if err != nil {
+ return nil, err
+ }
+ if groups != nil {
+ paginator, _ = newPaginatorFromPageGroups(groups, pagerSize, urlFactory)
+ } else {
+ pages, err := ToPages(seq)
+ if err != nil {
+ return nil, err
+ }
+ paginator, _ = newPaginatorFromPages(pages, pagerSize, urlFactory)
+ }
+
+ return paginator, nil
+}
+
+// probablyEqual checks page lists for probable equality.
+// It may return false positives.
+// The motivation behind this is to avoid potential costly reflect.DeepEqual
+// when "probably" is good enough.
+func probablyEqualPageLists(a1 interface{}, a2 interface{}) bool {
+
+ if a1 == nil || a2 == nil {
+ return a1 == a2
+ }
+
+ t1 := reflect.TypeOf(a1)
+ t2 := reflect.TypeOf(a2)
+
+ if t1 != t2 {
+ return false
+ }
+
+ if g1, ok := a1.(PagesGroup); ok {
+ g2 := a2.(PagesGroup)
+ if len(g1) != len(g2) {
+ return false
+ }
+ if len(g1) == 0 {
+ return true
+ }
+ if g1.Len() != g2.Len() {
+ return false
+ }
+
+ return g1[0].Pages[0] == g2[0].Pages[0]
+ }
+
+ p1, err1 := ToPages(a1)
+ p2, err2 := ToPages(a2)
+
+ // probably the same wrong type
+ if err1 != nil && err2 != nil {
+ return true
+ }
+
+ if len(p1) != len(p2) {
+ return false
+ }
+
+ if len(p1) == 0 {
+ return true
+ }
+
+ return p1[0] == p2[0]
+}
+
+func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+
+ if size <= 0 {
+ return nil, errors.New("Paginator size must be positive")
+ }
+
+ split := splitPages(pages, size)
+
+ return newPaginator(split, len(pages), size, urlFactory)
+}
+
+func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+
+ if size <= 0 {
+ return nil, errors.New("Paginator size must be positive")
+ }
+
+ split := splitPageGroups(pageGroups, size)
+
+ return newPaginator(split, pageGroups.Len(), size, urlFactory)
+}
+
+func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+ p := &Paginator{total: total, paginatedElements: elements, size: size, paginationURLFactory: urlFactory}
+
+ var ps pagers
+
+ if len(elements) > 0 {
+ ps = make(pagers, len(elements))
+ for i := range p.paginatedElements {
+ ps[i] = &Pager{number: (i + 1), Paginator: p}
+ }
+ } else {
+ ps = make(pagers, 1)
+ ps[0] = &Pager{number: 1, Paginator: p}
+ }
+
+ p.pagers = ps
+
+ return p, nil
+}
+
+func newPaginationURLFactory(d TargetPathDescriptor) paginationURLFactory {
+
+ return func(pageNumber int) string {
+ pathDescriptor := d
+ var rel string
+ if pageNumber > 1 {
+ rel = fmt.Sprintf("/%s/%d/", d.PathSpec.PaginatePath, pageNumber)
+ pathDescriptor.Addends = rel
+ }
+
+ return CreateTargetPaths(pathDescriptor).RelPermalink(d.PathSpec)
+
+ }
+}
diff --git a/resources/page/pagination_test.go b/resources/page/pagination_test.go
new file mode 100644
index 000000000..1308d60d1
--- /dev/null
+++ b/resources/page/pagination_test.go
@@ -0,0 +1,307 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "html/template"
+ "testing"
+
+ "github.com/spf13/viper"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/stretchr/testify/require"
+)
+
+func TestSplitPages(t *testing.T) {
+ t.Parallel()
+
+ pages := createTestPages(21)
+ chunks := splitPages(pages, 5)
+ require.Equal(t, 5, len(chunks))
+
+ for i := 0; i < 4; i++ {
+ require.Equal(t, 5, chunks[i].Len())
+ }
+
+ lastChunk := chunks[4]
+ require.Equal(t, 1, lastChunk.Len())
+
+}
+
+func TestSplitPageGroups(t *testing.T) {
+ t.Parallel()
+ pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+ chunks := splitPageGroups(groups, 5)
+ require.Equal(t, 5, len(chunks))
+
+ firstChunk := chunks[0]
+
+ // alternate weight 5 and 10
+ if groups, ok := firstChunk.(PagesGroup); ok {
+ require.Equal(t, 5, groups.Len())
+ for _, pg := range groups {
+ // first group 10 in weight
+ require.Equal(t, 10, pg.Key)
+ for _, p := range pg.Pages {
+ require.True(t, p.FuzzyWordCount()%2 == 0) // magic test
+ }
+ }
+ } else {
+ t.Fatal("Excepted PageGroup")
+ }
+
+ lastChunk := chunks[4]
+
+ if groups, ok := lastChunk.(PagesGroup); ok {
+ require.Equal(t, 1, groups.Len())
+ for _, pg := range groups {
+ // last should have 5 in weight
+ require.Equal(t, 5, pg.Key)
+ for _, p := range pg.Pages {
+ require.True(t, p.FuzzyWordCount()%2 != 0) // magic test
+ }
+ }
+ } else {
+ t.Fatal("Excepted PageGroup")
+ }
+
+}
+
+func TestPager(t *testing.T) {
+ t.Parallel()
+ pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ _, err := newPaginatorFromPages(pages, -1, urlFactory)
+ require.NotNil(t, err)
+
+ _, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
+ require.NotNil(t, err)
+
+ pag, err := newPaginatorFromPages(pages, 5, urlFactory)
+ require.Nil(t, err)
+ doTestPages(t, pag)
+ first := pag.Pagers()[0].First()
+ require.Equal(t, "Pager 1", first.String())
+ require.NotEmpty(t, first.Pages())
+ require.Empty(t, first.PageGroups())
+
+ pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ require.Nil(t, err)
+ doTestPages(t, pag)
+ first = pag.Pagers()[0].First()
+ require.NotEmpty(t, first.PageGroups())
+ require.Empty(t, first.Pages())
+
+}
+
+func doTestPages(t *testing.T, paginator *Paginator) {
+
+ paginatorPages := paginator.Pagers()
+
+ require.Equal(t, 5, len(paginatorPages))
+ require.Equal(t, 21, paginator.TotalNumberOfElements())
+ require.Equal(t, 5, paginator.PageSize())
+ require.Equal(t, 5, paginator.TotalPages())
+
+ first := paginatorPages[0]
+ require.Equal(t, template.HTML("page/1/"), first.URL())
+ require.Equal(t, first, first.First())
+ require.True(t, first.HasNext())
+ require.Equal(t, paginatorPages[1], first.Next())
+ require.False(t, first.HasPrev())
+ require.Nil(t, first.Prev())
+ require.Equal(t, 5, first.NumberOfElements())
+ require.Equal(t, 1, first.PageNumber())
+
+ third := paginatorPages[2]
+ require.True(t, third.HasNext())
+ require.True(t, third.HasPrev())
+ require.Equal(t, paginatorPages[1], third.Prev())
+
+ last := paginatorPages[4]
+ require.Equal(t, template.HTML("page/5/"), last.URL())
+ require.Equal(t, last, last.Last())
+ require.False(t, last.HasNext())
+ require.Nil(t, last.Next())
+ require.True(t, last.HasPrev())
+ require.Equal(t, 1, last.NumberOfElements())
+ require.Equal(t, 5, last.PageNumber())
+}
+
+func TestPagerNoPages(t *testing.T) {
+ t.Parallel()
+ pages := createTestPages(0)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first := paginator.Pagers()[0].First()
+ require.Empty(t, first.PageGroups())
+ require.Empty(t, first.Pages())
+
+ paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first = paginator.Pagers()[0].First()
+ require.Empty(t, first.PageGroups())
+ require.Empty(t, first.Pages())
+
+}
+
+func doTestPagerNoPages(t *testing.T, paginator *Paginator) {
+ paginatorPages := paginator.Pagers()
+
+ require.Equal(t, 1, len(paginatorPages))
+ require.Equal(t, 0, paginator.TotalNumberOfElements())
+ require.Equal(t, 5, paginator.PageSize())
+ require.Equal(t, 0, paginator.TotalPages())
+
+ // pageOne should be nothing but the first
+ pageOne := paginatorPages[0]
+ require.NotNil(t, pageOne.First())
+ require.False(t, pageOne.HasNext())
+ require.False(t, pageOne.HasPrev())
+ require.Nil(t, pageOne.Next())
+ require.Equal(t, 1, len(pageOne.Pagers()))
+ require.Equal(t, 0, pageOne.Pages().Len())
+ require.Equal(t, 0, pageOne.NumberOfElements())
+ require.Equal(t, 0, pageOne.TotalNumberOfElements())
+ require.Equal(t, 0, pageOne.TotalPages())
+ require.Equal(t, 1, pageOne.PageNumber())
+ require.Equal(t, 5, pageOne.PageSize())
+
+}
+
+func TestPaginationURLFactory(t *testing.T) {
+ t.Parallel()
+ cfg := viper.New()
+ cfg.Set("paginatePath", "zoo")
+
+ for _, uglyURLs := range []bool{false, true} {
+ t.Run(fmt.Sprintf("uglyURLs=%t", uglyURLs), func(t *testing.T) {
+
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ baseURL string
+ page int
+ expected string
+ expectedUgly string
+ }{
+ {"HTML home page 32",
+ TargetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat}, "http://example.com/", 32, "/zoo/32/", "/zoo/32.html"},
+ {"JSON home page 42",
+ TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, "http://example.com/", 42, "/zoo/42/index.json", "/zoo/42.json"},
+ }
+
+ for _, test := range tests {
+ d := test.d
+ cfg.Set("baseURL", test.baseURL)
+ cfg.Set("uglyURLs", uglyURLs)
+ d.UglyURLs = uglyURLs
+
+ pathSpec := newTestPathSpecFor(cfg)
+ d.PathSpec = pathSpec
+
+ factory := newPaginationURLFactory(d)
+
+ got := factory(test.page)
+
+ if uglyURLs {
+ require.Equal(t, test.expectedUgly, got)
+ } else {
+ require.Equal(t, test.expected, got)
+ }
+
+ }
+ })
+
+ }
+}
+
+func TestProbablyEqualPageLists(t *testing.T) {
+ t.Parallel()
+ fivePages := createTestPages(5)
+ zeroPages := createTestPages(0)
+ zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc")
+ fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc")
+ ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc")
+
+ for i, this := range []struct {
+ v1 interface{}
+ v2 interface{}
+ expect bool
+ }{
+ {nil, nil, true},
+ {"a", "b", true},
+ {"a", fivePages, false},
+ {fivePages, "a", false},
+ {fivePages, createTestPages(2), false},
+ {fivePages, fivePages, true},
+ {zeroPages, zeroPages, true},
+ {fivePagesByWeight, fivePagesByWeight, true},
+ {zeroPagesByWeight, fivePagesByWeight, false},
+ {zeroPagesByWeight, zeroPagesByWeight, true},
+ {fivePagesByWeight, fivePages, false},
+ {fivePagesByWeight, ninePagesByWeight, false},
+ } {
+ result := probablyEqualPageLists(this.v1, this.v2)
+
+ if result != this.expect {
+ t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
+
+ }
+ }
+}
+
+func TestPaginationPage(t *testing.T) {
+ t.Parallel()
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ fivePages := createTestPages(7)
+ fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
+
+ p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
+ p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
+
+ f1 := p1.pagers[0].First()
+ f2 := p2.pagers[0].First()
+
+ page11, _ := f1.page(1)
+ page1Nil, _ := f1.page(3)
+
+ page21, _ := f2.page(1)
+ page2Nil, _ := f2.page(3)
+
+ require.Equal(t, 3, page11.FuzzyWordCount())
+ require.Nil(t, page1Nil)
+
+ require.NotNil(t, page21)
+ require.Equal(t, 3, page21.FuzzyWordCount())
+ require.Nil(t, page2Nil)
+}
diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go
new file mode 100644
index 000000000..98489231b
--- /dev/null
+++ b/resources/page/permalinks.go
@@ -0,0 +1,248 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/pkg/errors"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
// PermalinkExpander holds permalink mappings per section.
type PermalinkExpander struct {
	// knownPermalinkAttributes maps :tags in a permalink specification to a
	// function which, given a page and the tag, returns the resulting string
	// to be used to replace that tag.
	knownPermalinkAttributes map[string]pageToPermaAttribute

	// expanders maps a section name to its compiled pattern expander,
	// built once from the "permalinks" configuration.
	expanders map[string]func(Page) (string, error)

	ps *helpers.PathSpec
}
+
+// NewPermalinkExpander creates a new PermalinkExpander configured by the given
+// PathSpec.
+func NewPermalinkExpander(ps *helpers.PathSpec) (PermalinkExpander, error) {
+
+ p := PermalinkExpander{ps: ps}
+
+ p.knownPermalinkAttributes = map[string]pageToPermaAttribute{
+ "year": p.pageToPermalinkDate,
+ "month": p.pageToPermalinkDate,
+ "monthname": p.pageToPermalinkDate,
+ "day": p.pageToPermalinkDate,
+ "weekday": p.pageToPermalinkDate,
+ "weekdayname": p.pageToPermalinkDate,
+ "yearday": p.pageToPermalinkDate,
+ "section": p.pageToPermalinkSection,
+ "sections": p.pageToPermalinkSections,
+ "title": p.pageToPermalinkTitle,
+ "slug": p.pageToPermalinkSlugElseTitle,
+ "filename": p.pageToPermalinkFilename,
+ }
+
+ patterns := ps.Cfg.GetStringMapString("permalinks")
+ if patterns == nil {
+ return p, nil
+ }
+
+ e, err := p.parse(patterns)
+ if err != nil {
+ return p, err
+ }
+
+ p.expanders = e
+
+ return p, nil
+}
+
+// Expand expands the path in p according to the rules defined for the given key.
+// If no rules are found for the given key, an empty string is returned.
+func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
+ expand, found := l.expanders[key]
+
+ if !found {
+ return "", nil
+ }
+
+ return expand(p)
+
+}
+
// parse compiles the section->pattern configuration into a map of expander
// functions, one per section. Unknown :attributes and ill-formed patterns
// fail here, at parse time, rather than at expansion time.
func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) {

	expanders := make(map[string]func(Page) (string, error))

	for k, pattern := range patterns {
		if !l.validate(pattern) {
			return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkIllFormed}
		}

		// Per-iteration copy: the closure below must capture this pattern,
		// not the loop variable's final value.
		pattern := pattern
		matches := attributeRegexp.FindAllStringSubmatch(pattern, -1)

		// Resolve the callback for every :attribute token up front.
		callbacks := make([]pageToPermaAttribute, len(matches))
		replacements := make([]string, len(matches))
		for i, m := range matches {
			replacement := m[0]
			attr := replacement[1:] // strip the leading ':'
			replacements[i] = replacement
			callback, ok := l.knownPermalinkAttributes[attr]

			if !ok {
				return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkAttributeUnknown}
			}

			callbacks[i] = callback
		}

		expanders[k] = func(p Page) (string, error) {

			// Pattern with no :attributes expands to itself.
			if matches == nil {
				return pattern, nil
			}

			newField := pattern

			for i, replacement := range replacements {
				attr := replacement[1:]
				callback := callbacks[i]
				newAttr, err := callback(p, attr)

				if err != nil {
					return "", &permalinkExpandError{pattern: pattern, err: err}
				}

				// Replace only the first occurrence; replacements are in
				// pattern order, so repeated tokens resolve left to right.
				newField = strings.Replace(newField, replacement, newAttr, 1)

			}

			return newField, nil

		}

	}

	return expanders, nil
}
+
// pageToPermaAttribute is the type of a function which, given a page and a tag
// can return a string to go in that position in the page (or an error)
type pageToPermaAttribute func(Page, string) (string, error)

// attributeRegexp matches one :attribute token, e.g. ":title" or ":year".
var attributeRegexp = regexp.MustCompile(`:\w+`)
+
+// validate determines if a PathPattern is well-formed
+func (l PermalinkExpander) validate(pp string) bool {
+ fragments := strings.Split(pp[1:], "/")
+ var bail = false
+ for i := range fragments {
+ if bail {
+ return false
+ }
+ if len(fragments[i]) == 0 {
+ bail = true
+ continue
+ }
+
+ matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
+ if matches == nil {
+ continue
+ }
+
+ for _, match := range matches {
+ k := strings.ToLower(match[0][1:])
+ if _, ok := l.knownPermalinkAttributes[k]; !ok {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+type permalinkExpandError struct {
+ pattern string
+ err error
+}
+
+func (pee *permalinkExpandError) Error() string {
+ return fmt.Sprintf("error expanding %q: %s", string(pee.pattern), pee.err)
+}
+
// Sentinel errors wrapped by permalinkExpandError when parsing a
// permalink configuration.
var (
	errPermalinkIllFormed        = errors.New("permalink ill-formed")
	errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
)
+
// pageToPermalinkDate resolves a date-derived attribute (:year, :month,
// :monthname, :day, :weekday, :weekdayname, :yearday) from p.Date().
func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string, error) {
	// a Page contains a Node which provides a field Date, time.Time
	switch dateField {
	case "year":
		return strconv.Itoa(p.Date().Year()), nil
	case "month":
		// Zero-padded numeric month, e.g. "04".
		return fmt.Sprintf("%02d", int(p.Date().Month())), nil
	case "monthname":
		return p.Date().Month().String(), nil
	case "day":
		return fmt.Sprintf("%02d", p.Date().Day()), nil
	case "weekday":
		return strconv.Itoa(int(p.Date().Weekday())), nil
	case "weekdayname":
		return p.Date().Weekday().String(), nil
	case "yearday":
		return strconv.Itoa(p.Date().YearDay()), nil
	}
	//TODO: support classic strftime escapes too
	// (and pass those through despite not being in the map)
	// Unreachable when called via knownPermalinkAttributes, which only maps
	// the fields handled above.
	panic("coding error: should not be here")
}
+
// pageToPermalinkTitle returns the URL-safe (URLized) form of the page title.
func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) {
	return l.ps.URLize(p.Title()), nil
}
+
// pageToPermalinkFilename returns the URL-safe form of the filename
func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, error) {
	name := p.File().TranslationBaseName()
	if name == "index" {
		// Page bundles; the directory name will hopefully have a better name.
		// Strip any trailing separator and take the last path element.
		dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
		_, name = filepath.Split(dir)
	}

	return l.ps.URLize(name), nil
}
+
// pageToPermalinkSlugElseTitle returns the URLized slug if the page has one,
// otherwise it falls back to the URLized title.
func (l PermalinkExpander) pageToPermalinkSlugElseTitle(p Page, a string) (string, error) {
	if p.Slug() != "" {
		return l.ps.URLize(p.Slug()), nil
	}
	return l.pageToPermalinkTitle(p, a)
}
+
// pageToPermalinkSection resolves :section from the page's section.
func (l PermalinkExpander) pageToPermalinkSection(p Page, _ string) (string, error) {
	return p.Section(), nil
}

// pageToPermalinkSections resolves :sections from the current section's path.
func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, error) {
	return p.CurrentSection().SectionsPath(), nil
}
diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go
new file mode 100644
index 000000000..d7af7e06d
--- /dev/null
+++ b/resources/page/permalinks_test.go
@@ -0,0 +1,180 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
// testdataPermalinks is used by a couple of tests; the expandsTo content is
// subject to the data in simplePageJSON.
var testdataPermalinks = []struct {
	spec      string // permalink pattern under test
	valid     bool   // whether the pattern should parse
	expandsTo string // expected expansion (valid patterns only)
}{
	{":title", true, "spf13-vim-3.0-release-and-new-website"},
	{"/:year-:month-:title", true, "/2012-04-spf13-vim-3.0-release-and-new-website"},
	{"/:year/:yearday/:month/:monthname/:day/:weekday/:weekdayname/", true, "/2012/97/04/April/06/5/Friday/"}, // Dates
	{"/:section/", true, "/blue/"},                                  // Section
	{"/:title/", true, "/spf13-vim-3.0-release-and-new-website/"},   // Title
	{"/:slug/", true, "/the-slug/"},                                 // Slug
	{"/:filename/", true, "/test-page/"},                            // Filename
	// TODO(moorereason): need test scaffolding for this.
	//{"/:sections/", false, "/blue/"}, // Sections

	// Failures
	{"/blog/:fred", false, ""},
	{"/:year//:title", false, ""},
}
+
+func TestPermalinkExpansion(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ page := newTestPageWithFile("/test-page/index.md")
+ page.title = "Spf13 Vim 3.0 Release and new website"
+ d, _ := time.Parse("2006-01-02", "2012-04-06")
+ page.date = d
+ page.section = "blue"
+ page.slug = "The Slug"
+
+ for i, item := range testdataPermalinks {
+
+ msg := fmt.Sprintf("Test %d", i)
+
+ if !item.valid {
+ continue
+ }
+
+ permalinksConfig := map[string]string{
+ "posts": item.spec,
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ assert.NoError(err)
+
+ expanded, err := expander.Expand("posts", page)
+ assert.NoError(err)
+ assert.Equal(item.expandsTo, expanded, msg)
+
+ }
+}
+
// TestPermalinkExpansionMultiSection verifies that each configured section
// gets its own independent expander.
func TestPermalinkExpansionMultiSection(t *testing.T) {
	t.Parallel()

	assert := require.New(t)

	page := newTestPage()
	page.title = "Page Title"
	d, _ := time.Parse("2006-01-02", "2012-04-06")
	page.date = d
	page.section = "blue"
	page.slug = "The Slug"

	permalinksConfig := map[string]string{
		"posts": "/:slug",
		"blog":  "/:section/:year",
	}

	ps := newTestPathSpec()
	ps.Cfg.Set("permalinks", permalinksConfig)

	expander, err := NewPermalinkExpander(ps)
	assert.NoError(err)

	expanded, err := expander.Expand("posts", page)
	assert.NoError(err)
	assert.Equal("/the-slug", expanded)

	expanded, err = expander.Expand("blog", page)
	assert.NoError(err)
	assert.Equal("/blue/2012", expanded)

}
+
+func TestPermalinkExpansionConcurrent(t *testing.T) {
+ t.Parallel()
+
+ assert := require.New(t)
+
+ permalinksConfig := map[string]string{
+ "posts": "/:slug/",
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ assert.NoError(err)
+
+ var wg sync.WaitGroup
+
+ for i := 1; i < 20; i++ {
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ page := newTestPage()
+ for j := 1; j < 20; j++ {
+ page.slug = fmt.Sprintf("slug%d", i+j)
+ expanded, err := expander.Expand("posts", page)
+ assert.NoError(err)
+ assert.Equal(fmt.Sprintf("/%s/", page.slug), expanded)
+ }
+ }(i)
+ }
+
+ wg.Wait()
+}
+
// BenchmarkPermalinkExpand measures a single expansion of a pattern with two
// date tokens and a title token. The result is checked each iteration so a
// regression fails fast instead of skewing the numbers.
func BenchmarkPermalinkExpand(b *testing.B) {
	page := newTestPage()
	page.title = "Hugo Rocks"
	d, _ := time.Parse("2006-01-02", "2019-02-28")
	page.date = d

	permalinksConfig := map[string]string{
		"posts": "/:year-:month-:title",
	}

	ps := newTestPathSpec()
	ps.Cfg.Set("permalinks", permalinksConfig)

	expander, err := NewPermalinkExpander(ps)
	if err != nil {
		b.Fatal(err)
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		s, err := expander.Expand("posts", page)
		if err != nil {
			b.Fatal(err)
		}
		if s != "/2019-02-hugo-rocks" {
			b.Fatal(s)
		}

	}
}
diff --git a/resources/page/site.go b/resources/page/site.go
new file mode 100644
index 000000000..25df063f1
--- /dev/null
+++ b/resources/page/site.go
@@ -0,0 +1,53 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "html/template"
+ "time"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/navigation"
+)
+
// Site represents a site in the build. This is currently a very narrow interface,
// but the actual implementation will be richer, see hugolib.SiteInfo.
type Site interface {
	// Language returns the language of this site.
	Language() *langs.Language
	// RegularPages returns the regular pages only.
	RegularPages() Pages
	// Pages returns all pages.
	Pages() Pages
	IsServer() bool
	ServerPort() int
	Title() string
	// Sites returns all sites (languages) in the build.
	Sites() Sites
	Hugo() hugo.Info
	BaseURL() template.URL
	Taxonomies() interface{}
	LastChange() time.Time
	Menus() navigation.Menus
	Params() map[string]interface{}
	Data() map[string]interface{}
}
+
// Sites represents an ordered list of sites (languages).
type Sites []Site
+
+// First is a convenience method to get the first Site, i.e. the main language.
+func (s Sites) First() Site {
+ if len(s) == 0 {
+ return nil
+ }
+ return s[0]
+}
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
new file mode 100644
index 000000000..c2bcca0a5
--- /dev/null
+++ b/resources/page/testhelpers_test.go
@@ -0,0 +1,554 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/viper"
+
+ "github.com/gohugoio/hugo/navigation"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/related"
+
+ "github.com/gohugoio/hugo/source"
+)
+
// Compile-time checks that the test stub satisfies the interfaces it fakes.
var (
	_ resource.LengthProvider = (*testPage)(nil)
	_ Page = (*testPage)(nil)
)

// relatedDocsHandler is shared by all test pages; see GetRelatedDocsHandler.
var relatedDocsHandler = NewRelatedDocsHandler(related.DefaultConfig)
+
// newTestPage returns a testPage backed by a default file path.
func newTestPage() *testPage {
	return newTestPageWithFile("/a/b/c.md")
}

// newTestPageWithFile returns a testPage whose File() is a test file for the
// given slash-separated filename.
func newTestPageWithFile(filename string) *testPage {
	filename = filepath.FromSlash(filename)
	file := source.NewTestFile(filename)
	return &testPage{
		params: make(map[string]interface{}),
		data:   make(map[string]interface{}),
		file:   file,
	}
}
+
// newTestPathSpec returns a PathSpec built from a fresh viper config.
func newTestPathSpec() *helpers.PathSpec {
	return newTestPathSpecFor(viper.New())
}

// newTestPathSpecFor builds a PathSpec for the given config, backed by an
// in-memory filesystem. It panics on error (test helper).
func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec {
	config.SetBaseTestDefaults(cfg)
	fs := hugofs.NewMem(cfg)
	s, err := helpers.NewPathSpec(fs, cfg)
	if err != nil {
		panic(err)
	}
	return s
}
+
// testPage is a minimal, settable implementation of the Page interface used
// by the tests in this package. Only the fields below carry real data; the
// remaining interface methods panic (see the method stubs).
type testPage struct {
	description string
	title       string
	linkTitle   string

	section string

	content string

	fuzzyWordCount int

	path string

	slug string

	// Dates
	date       time.Time
	lastMod    time.Time
	expiryDate time.Time
	pubDate    time.Time

	weight int

	params map[string]interface{}
	data   map[string]interface{}

	file source.File
}
+
// Page interface stubs (Aliases..Eq). Methods the tests never exercise panic
// so accidental use fails loudly; the rest return fields or zero values.

func (p *testPage) Aliases() []string {
	panic("not implemented")
}

func (p *testPage) AllTranslations() Pages {
	panic("not implemented")
}

func (p *testPage) AlternativeOutputFormats() OutputFormats {
	panic("not implemented")
}

func (p *testPage) Author() Author {
	return Author{}

}
func (p *testPage) Authors() AuthorList {
	return nil
}

func (p *testPage) BaseFileName() string {
	panic("not implemented")
}

func (p *testPage) BundleType() string {
	panic("not implemented")
}

func (p *testPage) Content() (interface{}, error) {
	panic("not implemented")
}

func (p *testPage) ContentBaseName() string {
	panic("not implemented")
}

func (p *testPage) CurrentSection() Page {
	panic("not implemented")
}

func (p *testPage) Data() interface{} {
	return p.data
}

func (p *testPage) Sitemap() config.Sitemap {
	return config.Sitemap{}
}

func (p *testPage) Layout() string {
	return ""
}
func (p *testPage) Date() time.Time {
	return p.date
}

func (p *testPage) Description() string {
	return ""
}

func (p *testPage) Dir() string {
	panic("not implemented")
}

func (p *testPage) Draft() bool {
	panic("not implemented")
}

// Eq is pointer-identity equality.
func (p *testPage) Eq(other interface{}) bool {
	return p == other
}
+
// Page interface stubs (ExpiryDate..IsDraft); unused methods panic.

func (p *testPage) ExpiryDate() time.Time {
	return p.expiryDate
}

func (p *testPage) Ext() string {
	panic("not implemented")
}

func (p *testPage) Extension() string {
	panic("not implemented")
}

func (p *testPage) File() source.File {
	return p.file
}

func (p *testPage) FileInfo() os.FileInfo {
	panic("not implemented")
}

func (p *testPage) Filename() string {
	panic("not implemented")
}

func (p *testPage) FirstSection() Page {
	panic("not implemented")
}

func (p *testPage) FuzzyWordCount() int {
	return p.fuzzyWordCount
}

func (p *testPage) GetPage(ref string) (Page, error) {
	panic("not implemented")
}

func (p *testPage) GetParam(key string) interface{} {
	panic("not implemented")
}

// GetRelatedDocsHandler returns the package-level shared handler.
func (p *testPage) GetRelatedDocsHandler() *RelatedDocsHandler {
	return relatedDocsHandler
}

func (p *testPage) GitInfo() *gitmap.GitInfo {
	return nil
}

func (p *testPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
	panic("not implemented")
}

func (p *testPage) HasShortcode(name string) bool {
	panic("not implemented")
}

func (p *testPage) Hugo() hugo.Info {
	panic("not implemented")
}

func (p *testPage) InSection(other interface{}) (bool, error) {
	panic("not implemented")
}

func (p *testPage) IsAncestor(other interface{}) (bool, error) {
	panic("not implemented")
}

func (p *testPage) IsDescendant(other interface{}) (bool, error) {
	panic("not implemented")
}

func (p *testPage) IsDraft() bool {
	return false
}
+
// Page interface stubs (IsHome..Menus); unused methods panic.

func (p *testPage) IsHome() bool {
	panic("not implemented")
}

func (p *testPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
	panic("not implemented")
}

func (p *testPage) IsNode() bool {
	panic("not implemented")
}

func (p *testPage) IsPage() bool {
	panic("not implemented")
}

func (p *testPage) IsSection() bool {
	panic("not implemented")
}

func (p *testPage) IsTranslated() bool {
	panic("not implemented")
}

func (p *testPage) Keywords() []string {
	return nil
}

func (p *testPage) Kind() string {
	panic("not implemented")
}

func (p *testPage) Lang() string {
	panic("not implemented")
}

func (p *testPage) Language() *langs.Language {
	panic("not implemented")
}

func (p *testPage) LanguagePrefix() string {
	return ""
}

func (p *testPage) Lastmod() time.Time {
	return p.lastMod
}

// Len reports the content length (resource.LengthProvider).
func (p *testPage) Len() int {
	return len(p.content)
}

// LinkTitle falls back to the title when no explicit link title is set.
func (p *testPage) LinkTitle() string {
	if p.linkTitle == "" {
		return p.title
	}
	return p.linkTitle
}

func (p *testPage) LogicalName() string {
	panic("not implemented")
}

func (p *testPage) MediaType() media.Type {
	panic("not implemented")
}

func (p *testPage) Menus() navigation.PageMenus {
	return navigation.PageMenus{}
}
+
// Page interface stubs (Name..RSSLink); unused methods panic.

func (p *testPage) Name() string {
	panic("not implemented")
}

func (p *testPage) Next() Page {
	panic("not implemented")
}

func (p *testPage) NextInSection() Page {
	return nil
}

func (p *testPage) NextPage() Page {
	return nil
}

func (p *testPage) OutputFormats() OutputFormats {
	panic("not implemented")
}

func (p *testPage) Pages() Pages {
	panic("not implemented")
}

func (p *testPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
	return nil, nil
}

func (p *testPage) Paginator(options ...interface{}) (*Pager, error) {
	return nil, nil
}

// Param delegates to the shared resource.Param helper over p.params.
func (p *testPage) Param(key interface{}) (interface{}, error) {
	return resource.Param(p, nil, key)
}

func (p *testPage) Params() map[string]interface{} {
	return p.params
}

func (p *testPage) Parent() Page {
	panic("not implemented")
}

func (p *testPage) Path() string {
	return p.path
}

func (p *testPage) Permalink() string {
	panic("not implemented")
}

func (p *testPage) Plain() string {
	panic("not implemented")
}

func (p *testPage) PlainWords() []string {
	panic("not implemented")
}

func (p *testPage) Prev() Page {
	panic("not implemented")
}

func (p *testPage) PrevInSection() Page {
	return nil
}

func (p *testPage) PrevPage() Page {
	return nil
}

func (p *testPage) PublishDate() time.Time {
	return p.pubDate
}

func (p *testPage) RSSLink() template.URL {
	return ""
}
+
// Page interface stubs (RawContent..RelatedKeywords); unused methods panic.

func (p *testPage) RawContent() string {
	panic("not implemented")
}

func (p *testPage) ReadingTime() int {
	panic("not implemented")
}

func (p *testPage) Ref(argsm map[string]interface{}) (string, error) {
	panic("not implemented")
}

func (p *testPage) RefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
	return "", nil
}

func (p *testPage) RelPermalink() string {
	panic("not implemented")
}

func (p *testPage) RelRef(argsm map[string]interface{}) (string, error) {
	panic("not implemented")
}

func (p *testPage) RelRefFrom(argsm map[string]interface{}, source interface{}) (string, error) {
	return "", nil
}

func (p *testPage) Render(layout ...string) template.HTML {
	panic("not implemented")
}

func (p *testPage) ResourceType() string {
	panic("not implemented")
}

func (p *testPage) Resources() resource.Resources {
	panic("not implemented")
}

func (p *testPage) Scratch() *maps.Scratch {
	panic("not implemented")
}

// RelatedKeywords resolves the related-content keywords for the given index
// config by looking up the matching page param.
func (p *testPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
	v, err := p.Param(cfg.Name)
	if err != nil {
		return nil, err
	}

	return cfg.ToKeywords(v)
}
+
// Page interface stubs (Section..WordCount); unused methods panic.

func (p *testPage) Section() string {
	return p.section
}

func (p *testPage) Sections() Pages {
	panic("not implemented")
}

func (p *testPage) SectionsEntries() []string {
	panic("not implemented")
}

func (p *testPage) SectionsPath() string {
	panic("not implemented")
}

func (p *testPage) Site() Site {
	panic("not implemented")
}

func (p *testPage) Sites() Sites {
	panic("not implemented")
}

func (p *testPage) Slug() string {
	return p.slug
}

func (p *testPage) String() string {
	return p.path
}

func (p *testPage) Summary() template.HTML {
	panic("not implemented")
}

func (p *testPage) TableOfContents() template.HTML {
	panic("not implemented")
}

func (p *testPage) Title() string {
	return p.title
}

func (p *testPage) TranslationBaseName() string {
	panic("not implemented")
}

// TranslationKey reuses the path as a stable key.
func (p *testPage) TranslationKey() string {
	return p.path
}

func (p *testPage) Translations() Pages {
	panic("not implemented")
}

func (p *testPage) Truncated() bool {
	panic("not implemented")
}

// Type mirrors the section in this stub.
func (p *testPage) Type() string {
	return p.section
}

func (p *testPage) URL() string {
	return ""
}

func (p *testPage) UniqueID() string {
	panic("not implemented")
}

func (p *testPage) Weight() int {
	return p.weight
}

func (p *testPage) WordCount() int {
	panic("not implemented")
}
+
+func createTestPages(num int) Pages {
+ pages := make(Pages, num)
+
+ for i := 0; i < num; i++ {
+ m := &testPage{
+ path: fmt.Sprintf("/x/y/z/p%d.md", i),
+ weight: 5,
+ fuzzyWordCount: i + 2, // magic
+ }
+
+ if i%2 == 0 {
+ m.weight = 10
+ }
+ pages[i] = m
+
+ }
+
+ return pages
+}
diff --git a/resources/page/weighted.go b/resources/page/weighted.go
new file mode 100644
index 000000000..0937b3f86
--- /dev/null
+++ b/resources/page/weighted.go
@@ -0,0 +1,140 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/gohugoio/hugo/common/collections"
+)
+
// Compile-time check: WeightedPage supports the template slice bridge.
var (
	_ collections.Slicer = WeightedPage{}
)

// WeightedPages is a list of Pages with their corresponding (and relative) weight
// [{Weight: 30, Page: *1}, {Weight: 40, Page: *2}]
type WeightedPages []WeightedPage
+
// Page will return the Page (of Kind taxonomyList) that represents this set
// of pages. This method will panic if p is empty, as that should never happen.
func (p WeightedPages) Page() Page {
	if len(p) == 0 {
		panic("WeightedPages is empty")
	}

	first := p[0]

	// TODO(bep) fix tests
	// getOwner is nil in some test setups; return nil rather than calling a
	// nil func.
	if first.getOwner == nil {
		return nil
	}

	return first.getOwner()
}
+
// A WeightedPage is a Page with a weight.
type WeightedPage struct {
	Weight int
	Page

	// A callback used to fetch the owning Page. This avoids having to do
	// manual .Site.GetPage lookups. It is implemented in this roundabout way
	// because we cannot add additional state to the WeightedPages slice
	// without breaking lots of templates in the wild.
	getOwner func() Page
}

// NewWeightedPage creates a WeightedPage from the given weight, page and
// owner callback.
func NewWeightedPage(weight int, p Page, getOwner func() Page) WeightedPage {
	return WeightedPage{Weight: weight, Page: p, getOwner: getOwner}
}
+
// String implements fmt.Stringer with the weight and the page title.
func (w WeightedPage) String() string {
	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title())
}
+
+// Slice is not meant to be used externally. It's a bridge function
+// for the template functions. See collections.Slice.
+func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
+ switch items := in.(type) {
+ case WeightedPages:
+ return items, nil
+ case []interface{}:
+ weighted := make(WeightedPages, len(items))
+ for i, v := range items {
+ g, ok := v.(WeightedPage)
+ if !ok {
+ return nil, fmt.Errorf("type %T is not a WeightedPage", v)
+ }
+ weighted[i] = g
+ }
+ return weighted, nil
+ default:
+ return nil, fmt.Errorf("invalid slice type %T", items)
+ }
+}
+
+// Pages returns the Pages in this weighted page set.
+func (wp WeightedPages) Pages() Pages {
+ pages := make(Pages, len(wp))
+ for i := range wp {
+ pages[i] = wp[i].Page
+ }
+ return pages
+}
+
// Prev returns the previous Page relative to the given Page in
// this weighted page set. It wraps around: Prev of the first page is the
// last page. Returns nil when cur is not in the set.
func (wp WeightedPages) Prev(cur Page) Page {
	for x, c := range wp {
		if c.Page == cur {
			if x == 0 {
				return wp[len(wp)-1].Page
			}
			return wp[x-1].Page
		}
	}
	return nil
}
+
// Next returns the next Page relative to the given Page in
// this weighted page set. It wraps around: Next of the last page is the
// first page. Returns nil when cur is not in the set.
func (wp WeightedPages) Next(cur Page) Page {
	for x, c := range wp {
		if c.Page == cur {
			if x < len(wp)-1 {
				return wp[x+1].Page
			}
			return wp[0].Page
		}
	}
	return nil
}
+
// Len, Swap and Less implement sort.Interface, ordering by Weight with
// DefaultPageSort as the tiebreaker.
func (wp WeightedPages) Len() int      { return len(wp) }
func (wp WeightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }

// Sort stable sorts this weighted page set.
func (wp WeightedPages) Sort() { sort.Stable(wp) }

// Count returns the number of pages in this weighted page set.
func (wp WeightedPages) Count() int { return len(wp) }

func (wp WeightedPages) Less(i, j int) bool {
	// Equal weights fall back to the default page ordering.
	if wp[i].Weight == wp[j].Weight {
		return DefaultPageSort(wp[i].Page, wp[j].Page)
	}
	return wp[i].Weight < wp[j].Weight
}